From 5c8752817cd0b3b515e867ede4569b3c8e280496 Mon Sep 17 00:00:00 2001 From: steffnay Date: Fri, 13 Dec 2019 16:20:08 -0800 Subject: [PATCH 001/333] chore: add initial files for launch --- .../bigquery-storage/CODE_OF_CONDUCT.md | 43 ++++ handwritten/bigquery-storage/CONTRIBUTING.md | 65 ++++++ handwritten/bigquery-storage/LICENSE | 202 ++++++++++++++++++ handwritten/bigquery-storage/README.md | 2 + handwritten/bigquery-storage/synth.py | 42 ++++ 5 files changed, 354 insertions(+) create mode 100644 handwritten/bigquery-storage/CODE_OF_CONDUCT.md create mode 100644 handwritten/bigquery-storage/CONTRIBUTING.md create mode 100644 handwritten/bigquery-storage/LICENSE create mode 100644 handwritten/bigquery-storage/README.md create mode 100644 handwritten/bigquery-storage/synth.py diff --git a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..46b2a08ea6d --- /dev/null +++ b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md @@ -0,0 +1,43 @@ +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. + +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. 
+ +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. 
+ +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/handwritten/bigquery-storage/CONTRIBUTING.md b/handwritten/bigquery-storage/CONTRIBUTING.md new file mode 100644 index 00000000000..f6c4cf010e3 --- /dev/null +++ b/handwritten/bigquery-storage/CONTRIBUTING.md @@ -0,0 +1,65 @@ +# How to become a contributor and submit your own code + +**Table of contents** + +* [Contributor License Agreements](#contributor-license-agreements) +* [Contributing a patch](#contributing-a-patch) +* [Running the tests](#running-the-tests) +* [Releasing the library](#releasing-the-library) + +## Contributor License Agreements + +We'd love to accept your sample apps and patches! Before we can take them, we +have to jump a couple of legal hurdles. + +Please fill out either the individual or corporate Contributor License Agreement +(CLA). + + * If you are an individual writing original source code and you're sure you + own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). + * If you work for a company that wants to allow you to contribute your work, + then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). + +Follow either of the two links above to access the appropriate CLA and +instructions for how to sign and return it. Once we receive it, we'll be able to +accept your pull requests. + +## Contributing A Patch + +1. Submit an issue describing your proposed change to the repo in question. +1. The repo owner will respond to your issue promptly. +1. If your proposed change is accepted, and you haven't already done so, sign a + Contributor License Agreement (see details above). +1. Fork the desired repo, develop and test your code changes. +1. 
Ensure that your code adheres to the existing style in the code to which + you are contributing. +1. Ensure that your code has an appropriate set of tests which all pass. +1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. +1. Submit a pull request. + +## Running the tests + +1. [Prepare your environment for Node.js setup][setup]. + +1. Install dependencies: + + npm install + +1. Run the tests: + + # Run unit tests. + npm test + + # Run sample integration tests. + gcloud auth application-default login + npm run samples-test + + # Run all system tests. + gcloud auth application-default login + npm run system-test + +1. Lint (and maybe fix) any changes: + + npm run fix + +[setup]: https://cloud.google.com/nodejs/docs/setup diff --git a/handwritten/bigquery-storage/LICENSE b/handwritten/bigquery-storage/LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/handwritten/bigquery-storage/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md new file mode 100644 index 00000000000..a966ebe8497 --- /dev/null +++ b/handwritten/bigquery-storage/README.md @@ -0,0 +1,2 @@ +# Node.js Client for BigQuery Storage API +This is a generated README.md placeholder. Put your own documentation here. \ No newline at end of file diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py new file mode 100644 index 00000000000..a45d81d2398 --- /dev/null +++ b/handwritten/bigquery-storage/synth.py @@ -0,0 +1,42 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""This script is used to synthesize generated parts of this library.""" + +import synthtool as s +import synthtool.gcp as gcp +import subprocess +import logging + +logging.basicConfig(level=logging.DEBUG) + +# run the gapic generator +gapic = gcp.GAPICGenerator() +versions = ['v1beta1'] +for version in versions: + library = gapic.node_library( + 'bigquery_storage', + version, + config_path="/google/cloud/bigquery/storage/" "artman_bigquerystorage_v1beta1.yaml", + artman_output_name="bigquerystorage-v1beta1", + include_protos=True,) + s.copy(library, excludes=[]) + +# Copy common templates +common_templates = gcp.CommonTemplates() +templates = common_templates.node_library() +s.copy(templates, excludes=[]) + +# Node.js specific cleanup +subprocess.run(['npm', 'install']) +subprocess.run(['npm', 'run', 'fix']) \ No newline at end of file From 29f464dda253effeace49fd52074645e49ed875f Mon Sep 17 00:00:00 2001 From: steffnay Date: Wed, 8 Jan 2020 11:17:14 -0800 Subject: [PATCH 002/333] updated build file --- handwritten/bigquery-storage/synth.py | 34 +++++++++++++-------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index a45d81d2398..f9620996bce 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,31 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""This script is used to synthesize generated parts of this library.""" - import synthtool as s import synthtool.gcp as gcp import subprocess import logging - logging.basicConfig(level=logging.DEBUG) - -# run the gapic generator -gapic = gcp.GAPICGenerator() +# Run the gapic generator +gapic = gcp.GAPICMicrogenerator() +name = 'bigquerystorage' versions = ['v1beta1'] for version in versions: - library = gapic.node_library( - 'bigquery_storage', - version, - config_path="/google/cloud/bigquery/storage/" "artman_bigquerystorage_v1beta1.yaml", - artman_output_name="bigquerystorage-v1beta1", - include_protos=True,) - s.copy(library, excludes=[]) - + library = gapic.typescript_library( + name, + version, + proto_path=f'google/cloud/bigquery/storage/{version}', + generator_args={ + 'grpc-service-config': f'google/cloud/bigquery/storage/{version}/{name}_grpc_service_config.json', + 'package-name': f'@google-cloud/bigquery-storage', + }, + ) + s.copy(library, excludes=['README.md']) # Copy common templates common_templates = gcp.CommonTemplates() -templates = common_templates.node_library() +templates = common_templates.node_library(source_location='build/src') s.copy(templates, excludes=[]) - # Node.js specific cleanup subprocess.run(['npm', 'install']) -subprocess.run(['npm', 'run', 'fix']) \ No newline at end of file +subprocess.run(['npm', 'run', 'fix']) +subprocess.run(['npx', 'compileProtos', 'src']) \ No newline at end of file From f2f0c5922585660b3ae83dfaa56785efde096618 Mon Sep 17 00:00:00 2001 From: Steffany Brown <30247553+steffnay@users.noreply.github.com> Date: Thu, 20 Feb 2020 13:10:03 -0800 Subject: [PATCH 003/333] feat!: initial generation of library (#1) * initial generation * feat: add initial functionality * update quickstart * quickstart * regenerates library and adds quickstart * docs: updates README and samples/README * docs: update metadata, dependencies, & quickstart * chore: rerun synthtool * chore: rerun synthtool * test: updates tests * test: 
linted * chore: does advice actually work Co-authored-by: Benjamin E. Coe --- handwritten/bigquery-storage/.eslintignore | 5 + handwritten/bigquery-storage/.eslintrc.yml | 15 + .../.github/ISSUE_TEMPLATE/bug_report.md | 33 + .../.github/ISSUE_TEMPLATE/feature_request.md | 18 + .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + .../.github/release-please.yml | 1 + handwritten/bigquery-storage/.gitignore | 14 + handwritten/bigquery-storage/.jsdoc.js | 48 + .../bigquery-storage/.kokoro/common.cfg | 24 + .../.kokoro/continuous/node10/common.cfg | 34 + .../.kokoro/continuous/node10/docs.cfg | 4 + .../.kokoro/continuous/node10/lint.cfg | 4 + .../continuous/node10/samples-test.cfg | 7 + .../.kokoro/continuous/node10/system-test.cfg | 7 + .../.kokoro/continuous/node10/test.cfg | 9 + .../.kokoro/continuous/node12/common.cfg | 24 + .../.kokoro/continuous/node12/test.cfg | 0 .../.kokoro/continuous/node8/common.cfg | 24 + .../.kokoro/continuous/node8/test.cfg | 0 handwritten/bigquery-storage/.kokoro/docs.sh | 25 + handwritten/bigquery-storage/.kokoro/lint.sh | 33 + .../.kokoro/presubmit/node10/common.cfg | 34 + .../.kokoro/presubmit/node10/docs.cfg | 4 + .../.kokoro/presubmit/node10/lint.cfg | 4 + .../.kokoro/presubmit/node10/samples-test.cfg | 7 + .../.kokoro/presubmit/node10/system-test.cfg | 7 + .../.kokoro/presubmit/node10/test.cfg | 0 .../.kokoro/presubmit/node12/common.cfg | 24 + .../.kokoro/presubmit/node12/test.cfg | 0 .../.kokoro/presubmit/node8/common.cfg | 24 + .../.kokoro/presubmit/node8/test.cfg | 0 .../.kokoro/presubmit/windows/common.cfg | 2 + .../.kokoro/presubmit/windows/test.cfg | 2 + .../bigquery-storage/.kokoro/publish.sh | 31 + .../bigquery-storage/.kokoro/release/docs.cfg | 26 + .../bigquery-storage/.kokoro/release/docs.sh | 50 + .../.kokoro/release/publish.cfg | 74 + .../bigquery-storage/.kokoro/samples-test.sh | 57 + .../bigquery-storage/.kokoro/system-test.sh | 49 + 
handwritten/bigquery-storage/.kokoro/test.bat | 33 + handwritten/bigquery-storage/.kokoro/test.sh | 37 + .../bigquery-storage/.kokoro/trampoline.sh | 27 + handwritten/bigquery-storage/.mocharc.json | 5 + handwritten/bigquery-storage/.nycrc | 24 + handwritten/bigquery-storage/.prettierignore | 3 + handwritten/bigquery-storage/.prettierrc | 8 + .../bigquery-storage/.repo-metadata.json | 12 + handwritten/bigquery-storage/README.md | 230 +- handwritten/bigquery-storage/codecov.yaml | 4 + .../bigquery-storage/linkinator.config.json | 11 + handwritten/bigquery-storage/package.json | 56 + .../bigquery/storage/v1beta1/arrow.proto | 37 + .../cloud/bigquery/storage/v1beta1/avro.proto | 38 + .../storage/v1beta1/read_options.proto | 41 + .../bigquery/storage/v1beta1/storage.proto | 405 + .../storage/v1beta1/table_reference.proto | 43 + .../bigquery-storage/protos/protos.d.ts | 6137 ++++++ handwritten/bigquery-storage/protos/protos.js | 15841 ++++++++++++++++ .../bigquery-storage/protos/protos.json | 1532 ++ handwritten/bigquery-storage/src/index.ts | 24 + .../src/v1beta1/big_query_storage_client.ts | 833 + .../big_query_storage_client_config.json | 54 + .../v1beta1/big_query_storage_proto_list.json | 7 + .../bigquery-storage/src/v1beta1/index.ts | 19 + handwritten/bigquery-storage/synth.metadata | 38 + handwritten/bigquery-storage/synth.py | 2 +- .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 25 + .../bigquery-storage/system-test/install.ts | 51 + .../test/gapic-big_query_storage-v1beta1.ts | 372 + handwritten/bigquery-storage/tsconfig.json | 19 + handwritten/bigquery-storage/tslint.json | 3 + .../bigquery-storage/webpack.config.js | 64 + 74 files changed, 26797 insertions(+), 3 deletions(-) create mode 100644 handwritten/bigquery-storage/.eslintignore create mode 100644 handwritten/bigquery-storage/.eslintrc.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 
handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 handwritten/bigquery-storage/.github/release-please.yml create mode 100644 handwritten/bigquery-storage/.gitignore create mode 100644 handwritten/bigquery-storage/.jsdoc.js create mode 100644 handwritten/bigquery-storage/.kokoro/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/docs.sh create mode 100755 handwritten/bigquery-storage/.kokoro/lint.sh create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg create mode 
100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/publish.sh create mode 100644 handwritten/bigquery-storage/.kokoro/release/docs.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/release/docs.sh create mode 100644 handwritten/bigquery-storage/.kokoro/release/publish.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/samples-test.sh create mode 100755 handwritten/bigquery-storage/.kokoro/system-test.sh create mode 100644 handwritten/bigquery-storage/.kokoro/test.bat create mode 100755 handwritten/bigquery-storage/.kokoro/test.sh create mode 100755 handwritten/bigquery-storage/.kokoro/trampoline.sh create mode 100644 handwritten/bigquery-storage/.mocharc.json create mode 100644 handwritten/bigquery-storage/.nycrc create mode 100644 handwritten/bigquery-storage/.prettierignore create mode 100644 handwritten/bigquery-storage/.prettierrc create mode 100644 handwritten/bigquery-storage/.repo-metadata.json create mode 100644 handwritten/bigquery-storage/codecov.yaml create mode 100644 handwritten/bigquery-storage/linkinator.config.json create mode 100644 handwritten/bigquery-storage/package.json create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto create mode 100644 
handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto create mode 100644 handwritten/bigquery-storage/protos/protos.d.ts create mode 100644 handwritten/bigquery-storage/protos/protos.js create mode 100644 handwritten/bigquery-storage/protos/protos.json create mode 100644 handwritten/bigquery-storage/src/index.ts create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/index.ts create mode 100644 handwritten/bigquery-storage/synth.metadata create mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js create mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts create mode 100644 handwritten/bigquery-storage/system-test/install.ts create mode 100644 handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts create mode 100644 handwritten/bigquery-storage/tsconfig.json create mode 100644 handwritten/bigquery-storage/tslint.json create mode 100644 handwritten/bigquery-storage/webpack.config.js diff --git a/handwritten/bigquery-storage/.eslintignore b/handwritten/bigquery-storage/.eslintignore new file mode 100644 index 00000000000..09b31fe735a --- /dev/null +++ b/handwritten/bigquery-storage/.eslintignore @@ -0,0 +1,5 @@ +**/node_modules +src/**/doc/* +build/ +docs/ +protos/ diff --git a/handwritten/bigquery-storage/.eslintrc.yml b/handwritten/bigquery-storage/.eslintrc.yml new file mode 100644 index 00000000000..73eeec27612 --- /dev/null +++ b/handwritten/bigquery-storage/.eslintrc.yml @@ -0,0 +1,15 @@ +--- +extends: + - 'eslint:recommended' + - 'plugin:node/recommended' + - 
prettier +plugins: + - node + - prettier +rules: + prettier/prettier: error + block-scoped-var: error + eqeqeq: error + no-warning-comments: warn + no-var: error + prefer-const: error diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000..5adacf4591c --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,33 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/nodejs-bigquery-storage/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node + - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS: + - Node.js version: + - npm version: + - `@google-cloud/bigquery-storage` version: + +#### Steps to reproduce + + 1. ? + 2. ? + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000000..6365857f33c --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 00000000000..99586903212 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..19153139702 --- /dev/null +++ b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/handwritten/bigquery-storage/.github/release-please.yml b/handwritten/bigquery-storage/.github/release-please.yml new file mode 100644 index 00000000000..85344b92c7f --- /dev/null +++ b/handwritten/bigquery-storage/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: node diff --git a/handwritten/bigquery-storage/.gitignore b/handwritten/bigquery-storage/.gitignore new file mode 100644 index 00000000000..5d32b23782f --- /dev/null +++ b/handwritten/bigquery-storage/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js new file mode 100644 index 00000000000..719a77bf2e9 --- /dev/null +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -0,0 +1,48 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2018 Google, LLC.', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-storage', + theme: 'lumen' + }, + markdown: { + idInHeadings: true + } +}; diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg new file mode 100644 index 00000000000..81699465317 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg new file mode 100644 index 00000000000..e9656f0edfc --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg new file mode 100644 index 00000000000..5972e5b337c --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/docs.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg new file mode 100644 index 00000000000..0a5d546b96b --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg new file mode 100644 index 00000000000..68b02101fc1 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg new file mode 100644 index 00000000000..3ccb29d69f8 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg new file mode 100644 index 00000000000..468b8c7197a --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg @@ -0,0 +1,9 @@ +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg new file mode 100644 index 00000000000..6df937a77d6 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg new file mode 100644 index 00000000000..d9c4fb600d5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/docs.sh b/handwritten/bigquery-storage/.kokoro/docs.sh new file mode 100755 index 00000000000..952403faede --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/docs.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +cd $(dirname $0)/.. + +npm install + +npm run docs-test diff --git a/handwritten/bigquery-storage/.kokoro/lint.sh b/handwritten/bigquery-storage/.kokoro/lint.sh new file mode 100755 index 00000000000..b03cb0439a6 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/lint.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +cd $(dirname $0)/.. + +npm install + +# Install and link samples +if [ -f samples/package.json ]; then + cd samples/ + npm link ../ + npm install + cd .. 
+fi + +npm run lint diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg new file mode 100644 index 00000000000..e9656f0edfc --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg new file mode 100644 index 00000000000..5972e5b337c --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/docs.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg new file mode 100644 index 00000000000..0a5d546b96b --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" +} diff --git 
a/handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg new file mode 100644 index 00000000000..68b02101fc1 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg new file mode 100644 index 00000000000..3ccb29d69f8 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg new file mode 100644 index 00000000000..6df937a77d6 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg new file mode 100644 index 00000000000..d9c4fb600d5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg new file mode 100644 index 00000000000..d6e25e0b1b8 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg @@ -0,0 +1,2 @@ +# Format: //devtools/kokoro/config/proto/build.proto + diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg new file mode 100644 index 00000000000..83de067d5f1 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg @@ -0,0 +1,2 @@ +# Use the test file directly +build_file: "nodejs-bigquery-storage/.kokoro/test.bat" diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh new file mode 100755 index 00000000000..ff9c262295a --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +cd $(dirname $0)/.. + +NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-bigquery-storage-npm-token) +echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc + +npm install +npm publish --access=public --registry=https://wombat-dressing-room.appspot.com diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg new file mode 100644 index 00000000000..88eb54b1cd4 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/release/docs.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh new file mode 100755 index 00000000000..4d3a0868531 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# build jsdocs (Python is installed on the Node 10 docker image). +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=/home/node/.npm-global + export PATH="$PATH:/home/node/.npm-global/bin" + cd $(dirname $0)/../.. +fi +npm install +npm run docs + +# create docs.metadata, based on package.json and .repo-metadata.json. +npm i json@9.0.6 -g +python3 -m pip install --user gcp-docuploader +python3 -m docuploader create-metadata \ + --name=$(cat .repo-metadata.json | json name) \ + --version=$(cat package.json | json version) \ + --language=$(cat .repo-metadata.json | json language) \ + --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ + --product-page=$(cat .repo-metadata.json | json product_documentation) \ + --github-repository=$(cat .repo-metadata.json | json repo) \ + --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) +cp docs.metadata ./docs/docs.metadata + +# deploy the docs. 
+if [[ -z "$CREDENTIALS" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi +if [[ -z "$BUCKET" ]]; then + BUCKET=docs-staging +fi +python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg new file mode 100644 index 00000000000..5531834c7ba --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -0,0 +1,74 @@ +# Get npm token from Keystore +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_npm_token" + backend_type: FASTCONFIGPUSH + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# Fetch magictoken to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "releasetool-magictoken" + } + } +} + +# Fetch api key to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "magic-github-proxy-api-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-bigquery-storage-npm-token" + } + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/publish.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh new file mode 100755 index 00000000000..20e3241c9e9 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +export GCLOUD_PROJECT=long-door-651 + +cd $(dirname $0)/.. + +# Run a pre-test hook, if a pre-samples-test.sh is in the project +if [ -f .kokoro/pre-samples-test.sh ]; then + set +x + . .kokoro/pre-samples-test.sh + set -x +fi + +if [ -f samples/package.json ]; then + npm install + + # Install and link samples + cd samples/ + npm link ../ + npm install + cd .. + + npm run samples-test +fi + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh new file mode 100755 index 00000000000..fc5824e6667 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +export GCLOUD_PROJECT=long-door-651 + +cd $(dirname $0)/.. + +# Run a pre-test hook, if a pre-system-test.sh is in the project +if [ -f .kokoro/pre-system-test.sh ]; then + set +x + . .kokoro/pre-system-test.sh + set -x +fi + +npm install + +npm run system-test + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/handwritten/bigquery-storage/.kokoro/test.bat b/handwritten/bigquery-storage/.kokoro/test.bat new file mode 100644 index 00000000000..ae59e59be3e --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/test.bat @@ -0,0 +1,33 @@ +@rem Copyright 2018 Google LLC. All rights reserved. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem http://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. + +@echo "Starting Windows build" + +cd /d %~dp0 +cd .. 
+ +@rem npm path is not currently set in our image, we should fix this next time +@rem we upgrade Node.js in the image: +SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm + +call nvm use v12.14.1 +call which node + +call npm install || goto :error +call npm run test || goto :error + +goto :EOF + +:error +exit /b 1 diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh new file mode 100755 index 00000000000..9db11bb09d6 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=/home/node/.npm-global + +cd $(dirname $0)/.. + +npm install +npm test + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=10 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/handwritten/bigquery-storage/.kokoro/trampoline.sh b/handwritten/bigquery-storage/.kokoro/trampoline.sh new file mode 100755 index 00000000000..9bd4905c4b5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/trampoline.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Always run the cleanup script, regardless of the success of bouncing into +# the container. 
+function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT + +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/handwritten/bigquery-storage/.mocharc.json b/handwritten/bigquery-storage/.mocharc.json new file mode 100644 index 00000000000..670c5e2c24b --- /dev/null +++ b/handwritten/bigquery-storage/.mocharc.json @@ -0,0 +1,5 @@ +{ + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} diff --git a/handwritten/bigquery-storage/.nycrc b/handwritten/bigquery-storage/.nycrc new file mode 100644 index 00000000000..b18d5472b62 --- /dev/null +++ b/handwritten/bigquery-storage/.nycrc @@ -0,0 +1,24 @@ +{ + "report-dir": "./.coverage", + "reporter": ["text", "lcov"], + "exclude": [ + "**/*-test", + "**/.coverage", + "**/apis", + "**/benchmark", + "**/conformance", + "**/docs", + "**/samples", + "**/scripts", + "**/protos", + "**/test", + "**/*.d.ts", + ".jsdoc.js", + "**/.jsdoc.js", + "karma.conf.js", + "webpack-tests.config.js", + "webpack.config.js" + ], + "exclude-after-remap": false, + "all": true +} diff --git a/handwritten/bigquery-storage/.prettierignore b/handwritten/bigquery-storage/.prettierignore new file mode 100644 index 00000000000..f6fac98b0a8 --- /dev/null +++ b/handwritten/bigquery-storage/.prettierignore @@ -0,0 +1,3 @@ +node_modules/* +samples/node_modules/* +src/**/doc/* diff --git a/handwritten/bigquery-storage/.prettierrc b/handwritten/bigquery-storage/.prettierrc new file mode 100644 index 00000000000..df6eac07446 --- /dev/null +++ b/handwritten/bigquery-storage/.prettierrc @@ -0,0 +1,8 @@ +--- +bracketSpacing: false +printWidth: 80 +semi: true +singleQuote: true +tabWidth: 2 +trailingComma: es5 +useTabs: false diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json new file mode 100644 index 00000000000..e417e6a9678 --- /dev/null +++ 
b/handwritten/bigquery-storage/.repo-metadata.json @@ -0,0 +1,12 @@ +{ + "name": "bigquerystorage", + "name_pretty": "Google BigQuery Storage", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", + "issue_tracker": "https://b.corp.google.com/savedsearches/559654", + "release_level": "beta", + "language": "nodejs", + "repo": "googleapis/nodejs-bigquery-storage", + "distribution_name": "@google-cloud/bigquery-storage", + "api_id": "bigquerystorage.googleapis.com", + "requires_billing": true + } diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index a966ebe8497..21c1c791ab3 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -1,2 +1,228 @@ -# Node.js Client for BigQuery Storage API -This is a generated README.md placeholder. Put your own documentation here. \ No newline at end of file +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." 
+Google Cloud Platform logo + +# [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/nodejs-bigquery-storage) + +[![release level](https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) +[![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery-storage/master.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery-storage) + + + + +Client for the BigQuery Storage API + + + +* [Google BigQuery Storage Documentation][product-docs] +* [github.com/googleapis/nodejs-bigquery-storage](https://github.com/googleapis/nodejs-bigquery-storage) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + * [Before you begin](#before-you-begin) + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable billing for your project][billing]. +1. [Enable the Google BigQuery Storage API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + +### Installing the client library + +```bash +npm install @google-cloud/bigquery-storage +``` + + +### Using the client library + +```javascript + + // The read stream contains blocks of Avro-encoded bytes. We use the + // 'avsc' library to decode these blocks. 
Install avsc with the following + // command: npm install avsc + const avro = require('avsc'); + + // See reference documentation at + // https://cloud.google.com/bigquery/docs/reference/storage + const bqStorage = require('@google-cloud/bigquery-storage').v1beta1 + .BigQueryStorageClient; + + const client = new bqStorage(); + + async function bigqueryStorageQuickstart() { + // Get current project ID. The read session is created in this project. + // This project can be different from that which contains the table. + const myProjectId = await client.getProjectId(); + + // This example reads baby name data from the public datasets. + const projectId = 'bigquery-public-data'; + const datasetId = 'usa_names'; + const tableId = 'usa_1910_current'; + + const tableReference = { + projectId, + datasetId, + tableId, + }; + + const parent = `projects/${myProjectId}`; + + /* We limit the output columns to a subset of those allowed in the table, + * and set a simple filter to only report names from the state of + * Washington (WA). + */ + const readOptions = { + selectedFields: ['name', 'number', 'state'], + rowRestriction: 'state = "WA"', + }; + + let tableModifiers = null; + const snapshotSeconds = 0; + + // Set a snapshot time if it's been specified. + if (snapshotSeconds > 0) { + tableModifiers = {snapshotTime: {seconds: snapshotSeconds}}; + } + + // API request. + const request = { + tableReference, + parent, + readOptions, + tableModifiers, + // This API can also deliver data serialized in Apache Arrow format. + // This example leverages Apache Avro. + format: 'AVRO', + /* We use a LIQUID strategy in this example because we only read from a + * single stream. Consider BALANCED if you're consuming multiple streams + * concurrently and want more consistent stream sizes. 
+ */ + shardingStrategy: 'LIQUID', + }; + + const [session] = await client.createReadSession(request); + + const schema = JSON.parse(session.avroSchema.schema); + + const avroType = avro.Type.forSchema(schema); + + /* The offset requested must be less than the last + * row read from ReadRows. Requesting a larger offset is + * undefined. + */ + let offset = 0; + + const readRowsRequest = { + // Optional stream name or offset. Offset requested must be less than the last + // row read from readRows(). Requesting a larger offset is undefined. + readPosition: { + stream: session.streams[0], + offset, + }, + }; + + const names = new Set(); + const states = {}; + + /* We'll use only a single stream for reading data from the table. Because + * of dynamic sharding, this will yield all the rows in the table. However, + * if you wanted to fan out multiple readers you could do so by having a + * reader process each individual stream. + */ + client + .readRows(readRowsRequest) + .on('error', console.error) + .on('data', function(data) { + try { + const decodedData = avroType.decode( + data.avroRows.serializedBinaryRows + ); + + names.add(decodedData.value.name); + + if (!states[decodedData.value.state]) { + states[decodedData.value.state] = true; + } + + offset = decodedData.offset; + } catch (error) { + console.log(error); + } + }) + .on('end', function() { + console.log( + `Got ${names.size} unique names in states: ${Object.keys(states)}` + ); + console.log(`Last offset: ${offset}`); + }); + } + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples) directory. The samples' `README.md` +has instructions for running the samples. 
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be in **beta**. This means it is expected to be +mostly stable while we work toward a general availability release; however, +complete stability is not guaranteed. We will address issues and requests +against beta libraries with a high priority. + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/CONTRIBUTING.md). 
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/LICENSE) + + +[product-docs]: https://cloud.google.com/bigquery/docs/reference/storage +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/handwritten/bigquery-storage/codecov.yaml b/handwritten/bigquery-storage/codecov.yaml new file mode 100644 index 00000000000..5724ea9478d --- /dev/null +++ b/handwritten/bigquery-storage/codecov.yaml @@ -0,0 +1,4 @@ +--- +codecov: + ci: + - source.cloud.google.com diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json new file mode 100644 index 00000000000..a4a25c7baee --- /dev/null +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -0,0 +1,11 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://www.npmjs.org/package/@google-cloud/bigquery-storage", + "https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples", + "https://github.com/googleapis/nodejs-bigquery-storage/blob/master/samples/quickstart.js" + ] +} diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json new file mode 100644 index 00000000000..90fd02d8399 --- /dev/null +++ b/handwritten/bigquery-storage/package.json @@ -0,0 +1,56 @@ +{ + "name": "@google-cloud/bigquery-storage", + "version": "0.1.0", + "description": "Client for the BigQuery Storage API", + "repository": "googleapis/nodejs-bigquery-storage", + "license": "Apache-2.0", + "author": "Google LLC", + "files": [ + "build/src", + "build/protos" + ], + "main": 
"build/src/index.js", + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix && eslint samples --fix", + "prelint": "cd samples; npm link ../; npm i", + "lint": "gts check && eslint samples", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../" + }, + "dependencies": { + "google-gax": "^1.14.1" + }, + "devDependencies": { + "@types/mocha": "^7.0.1", + "@types/node": "^13.7.1", + "c8": "^7.1.0", + "gts": "^1.1.2", + "jsdoc": "^3.6.3", + "jsdoc-fresh": "^1.0.2", + "jsdoc-region-tag": "^1.0.4", + "linkinator": "^2.0.1", + "mocha": "^7.0.1", + "pack-n-play": "^1.0.0-2", + "null-loader": "^3.0.0", + "ts-loader": "^6.2.1", + "typescript": "~3.7.5", + "webpack": "^4.41.6", + "webpack-cli": "^3.3.11", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.0", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", + "prettier": "^1.19.1" + }, + "engines": { + "node": ">=8.13.0" + } +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto new file mode 100644 index 00000000000..3003de444c2 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -0,0 +1,37 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "ArrowProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Arrow schema. +message ArrowSchema { + // IPC serialized Arrow schema. + bytes serialized_schema = 1; +} + +// Arrow RecordBatch. +message ArrowRecordBatch { + // IPC serialized Arrow RecordBatch. + bytes serialized_record_batch = 1; + + // The count of rows in the returning block. + int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto new file mode 100644 index 00000000000..021d8e44f9f --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -0,0 +1,38 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "AvroProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Avro schema. +message AvroSchema { + // Json serialized schema, as described at + // https://avro.apache.org/docs/1.8.1/spec.html + string schema = 1; +} + +// Avro rows. +message AvroRows { + // Binary serialized rows in a block. + bytes serialized_binary_rows = 1; + + // The count of rows in the returning block. + int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto new file mode 100644 index 00000000000..9591deba7f4 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Options dictating how we read a table. +message TableReadOptions { + // Optional. 
Names of the fields in the table that should be read. If empty, + // all fields will be read. If the specified field is a nested field, all the + // sub-fields in the field will be selected. The output field order is + // unrelated to the order of fields in selected_fields. + repeated string selected_fields = 1; + + // Optional. SQL text filtering statement, similar to a WHERE clause in + // a query. Currently, only a single predicate that is a comparison between + // a column and a constant value is supported. Aggregates are not supported. + // + // Examples: "int_field > 5" + // "date_field = CAST('2014-9-27' as DATE)" + // "nullable_field is not NULL" + // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // "numeric_field BETWEEN 1.0 AND 5.0" + string row_restriction = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto new file mode 100644 index 00000000000..22f742fbb65 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -0,0 +1,405 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; +import "google/cloud/bigquery/storage/v1beta1/avro.proto"; +import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; +import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// BigQuery storage API. +// +// The BigQuery storage API can be used to read data stored in BigQuery. +service BigQueryStorage { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.readonly," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a new read session. A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. + // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Read sessions automatically expire 24 hours after they are created and do + // not require manual clean-up by the caller. 
+ rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { + option (google.api.http) = { + post: "/v1beta1/{table_reference.project_id=projects/*}" + body: "*" + additional_bindings { + post: "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}" + body: "*" + } + }; + option (google.api.method_signature) = "table_reference,parent,requested_streams"; + } + + // Reads rows from the table in the format prescribed by the read session. + // Each response contains one or more table rows, up to a maximum of 10 MiB + // per response; read requests which attempt to read individual rows larger + // than this will fail. + // + // Each request also returns a set of stream statistics reflecting the + // estimated total number of rows in the read stream. This number is computed + // based on the total table size and the number of active streams in the read + // session, and may change as other streams continue to read data. + rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { + get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" + }; + option (google.api.method_signature) = "read_position"; + } + + // Creates additional streams for a ReadSession. This API can be used to + // dynamically adjust the parallelism of a batch processing task upwards by + // adding additional workers. + rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) { + option (google.api.http) = { + post: "/v1beta1/{session.name=projects/*/sessions/*}" + body: "*" + }; + option (google.api.method_signature) = "session,requested_streams"; + } + + // Triggers the graceful termination of a single stream in a ReadSession. This + // API can be used to dynamically adjust the parallelism of a batch processing + // task downwards without losing data. 
+ // + // This API does not delete the stream -- it remains visible in the + // ReadSession, and any data processed by the stream is not released to other + // streams. However, no additional data will be assigned to the stream once + // this call completes. Callers must continue reading data on the stream until + // the end of the stream is reached so that data which has already been + // assigned to the stream will be processed. + // + // This method will return an error if there are no other live streams + // in the Session, or if SplitReadStream() has been called on the given + // Stream. + rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1beta1/{stream.name=projects/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "stream"; + } + + // Splits a given read stream into two Streams. These streams are referred to + // as the primary and the residual of the split. The original stream can still + // be read from in the same manner as before. Both of the returned streams can + // also be read from, and the total rows return by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back to back in the + // original Stream. Concretely, it is guaranteed that for streams Original, + // Primary, and Residual, that Original[0-j] = Primary[0-j] and + // Original[j-n] = Residual[0-m] once the streams have been read to + // completion. + // + // This method is guaranteed to be idempotent. + rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + option (google.api.http) = { + get: "/v1beta1/{original_stream.name=projects/*/streams/*}" + }; + option (google.api.method_signature) = "original_stream"; + } +} + +// Information about a single data stream within a read session. 
+message Stream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/Stream" + pattern: "projects/{project}/locations/{location}/streams/{stream}" + }; + + // Name of the stream, in the form + // `projects/{project_id}/locations/{location}/streams/{stream_id}`. + string name = 1; +} + +// Expresses a point within a given stream using an offset position. +message StreamPosition { + // Identifier for a given Stream. + Stream stream = 1; + + // Position in the stream. + int64 offset = 2; +} + +// Information returned from a `CreateReadSession` request. +message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + + // Unique identifier for the session, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}`. + string name = 1; + + // Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. + google.protobuf.Timestamp expire_time = 2; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. + oneof schema { + // Avro schema. + AvroSchema avro_schema = 5; + + // Arrow schema. + ArrowSchema arrow_schema = 6; + } + + // Streams associated with this session. + repeated Stream streams = 4; + + // Table that this ReadSession is reading from. + TableReference table_reference = 7; + + // Any modifiers which are applied when reading from the specified table. + TableModifiers table_modifiers = 8; + + // The strategy to use for distributing data among the streams. + ShardingStrategy sharding_strategy = 9; +} + +// Creates a new read session, which may include additional options such as +// requested parallelism, projection filters and constraints. +message CreateReadSessionRequest { + // Required. 
Reference to the table to read. + TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. String of the form `projects/{project_id}` indicating the + // project this ReadSession is associated with. This is the project that will + // be billed for usage. + string parent = 6 [(google.api.field_behavior) = REQUIRED]; + + // Any modifiers to the Table (e.g. snapshot timestamp). + TableModifiers table_modifiers = 2; + + // Initial number of streams. If unset or 0, we will + // provide a value of streams so as to produce reasonable throughput. Must be + // non-negative. The number of streams may be lower than the requested number, + // depending on the amount parallelism that is reasonable for the table and + // the maximum amount of parallelism allowed by the system. + // + // Streams must be read starting from offset 0. + int32 requested_streams = 3; + + // Read options for this session (e.g. column selection, filters). + TableReadOptions read_options = 4; + + // Data output format. Currently default to Avro. + DataFormat format = 5; + + // The strategy to use for distributing data among multiple streams. Currently + // defaults to liquid sharding. + ShardingStrategy sharding_strategy = 7; +} + +// Data format for input or output data. +enum DataFormat { + // Data format is unspecified. + DATA_FORMAT_UNSPECIFIED = 0; + + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. + AVRO = 1; + + ARROW = 3; +} + +// Strategy for distributing data among multiple streams in a read session. +enum ShardingStrategy { + // Same as LIQUID. + SHARDING_STRATEGY_UNSPECIFIED = 0; + + // Assigns data to each stream based on the client's read rate. The faster the + // client reads from a stream, the more data is assigned to the stream. In + // this strategy, it's possible to read all data from a single stream even if + // there are other streams present. 
+ LIQUID = 1; + + // Assigns data to each stream such that roughly the same number of rows can + // be read from each stream. Because the server-side unit for assigning data + // is collections of rows, the API does not guarantee that each stream will + // return the same number or rows. Additionally, the limits are enforced based + // on the number of pre-filtering rows, so some filters can lead to lopsided + // assignments. + BALANCED = 2; +} + +// Requesting row data via `ReadRows` must provide Stream position information. +message ReadRowsRequest { + // Required. Identifier of the position in the stream to start reading from. + // The offset requested must be less than the last row read from ReadRows. + // Requesting a larger offset is undefined. + StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Progress information for a given Stream. +message StreamStatus { + // Number of estimated rows in the current stream. May change over time as + // different readers in the stream progress at rates which are relatively fast + // or slow. + int64 estimated_row_count = 1; + + // A value in the range [0.0, 1.0] that represents the fraction of rows + // assigned to this stream that have been processed by the server. In the + // presence of read filters, the server may process more rows than it returns, + // so this value reflects progress through the pre-filtering rows. + // + // This value is only populated for sessions created through the BALANCED + // sharding strategy. + float fraction_consumed = 2; + + // Represents the progress of the current stream. + // + // Note: This value is under development and should not be used. Use + // `fraction_consumed` instead. + Progress progress = 4; + + // Whether this stream can be split. For sessions that use the LIQUID sharding + // strategy, this value is always false. 
For BALANCED sessions, this value is + // false when enough data have been read such that no more splits are possible + // at that point or beyond. For small tables or streams that are the result of + // a chain of splits, this value may never be true. + bool is_splittable = 3; +} + +message Progress { + // The fraction of rows assigned to the stream that have been processed by the + // server so far, not including the rows in the current response message. + // + // This value, along with `at_response_end`, can be used to interpolate the + // progress made as the rows in the message are being processed using the + // following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the `at_response_start` + // value of the current response. + float at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the rows in + // the current response. + float at_response_end = 2; +} + +// Information on if the current connection is being throttled. +message ThrottleStatus { + // How much this connection is being throttled. + // 0 is no throttling, 100 is completely throttled. + int32 throttle_percent = 1; +} + +// Response from calling `ReadRows` may include row data, progress and +// throttling information. +message ReadRowsResponse { + // Row data is returned in format specified during session creation. + oneof rows { + // Serialized row data in AVRO format. + AvroRows avro_rows = 3; + + // Serialized row data in Arrow RecordBatch format. + ArrowRecordBatch arrow_record_batch = 4; + } + + // Number of serialized rows in the rows block. 
This value is recorded here, + // in addition to the row_count values in the output-specific messages in + // `rows`, so that code which needs to record progress through the stream can + // do so in an output format-independent way. + int64 row_count = 6; + + // Estimated stream statistics. + StreamStatus status = 2; + + // Throttling status. If unset, the latest response still describes + // the current throttling status. + ThrottleStatus throttle_status = 5; +} + +// Information needed to request additional streams for an established read +// session. +message BatchCreateReadSessionStreamsRequest { + // Required. Must be a non-expired session obtained from a call to + // CreateReadSession. Only the name field needs to be set. + ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Number of new streams requested. Must be positive. + // Number of added streams may be less than this, see CreateReadSessionRequest + // for more information. + int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The response from `BatchCreateReadSessionStreams` returns the stream +// identifiers for the newly created streams. +message BatchCreateReadSessionStreamsResponse { + // Newly added streams. + repeated Stream streams = 1; +} + +// Request information for invoking `FinalizeStream`. +message FinalizeStreamRequest { + // Stream to finalize. + Stream stream = 2; +} + +// Request information for `SplitReadStream`. +message SplitReadStreamRequest { + // Stream to split. + Stream original_stream = 1; + + // A value in the range (0.0, 1.0) that specifies the fractional point at + // which the original stream should be split. The actual split point is + // evaluated on pre-filtered rows, so if a filter is provided, then there is + // no guarantee that the division of the rows between the new child streams + // will be proportional to this fractional value. 
Additionally, because the + // server-side unit for assigning data is collections of rows, this fraction + // will always map to to a data storage boundary on the server side. + float fraction = 2; +} + +// Response from `SplitReadStream`. +message SplitReadStreamResponse { + // Primary stream, which contains the beginning portion of + // |original_stream|. An empty value indicates that the original stream can no + // longer be split. + Stream primary_stream = 1; + + // Remainder stream, which contains the tail of |original_stream|. An empty + // value indicates that the original stream can no longer be split. + Stream remainder_stream = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto new file mode 100644 index 00000000000..a55dc48eb02 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -0,0 +1,43 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +import "google/api/resource.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "TableReferenceProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Table reference that includes just the 3 strings needed to identify a table. +message TableReference { + // The assigned project ID of the project. + string project_id = 1; + + // The ID of the dataset in the above project. + string dataset_id = 2; + + // The ID of the table in the above dataset. + string table_id = 3; +} + +// All fields in this message optional. +message TableModifiers { + // The snapshot time of the table. If not set, interpreted as now. + google.protobuf.Timestamp snapshot_time = 1; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts new file mode 100644 index 00000000000..a0b708af703 --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -0,0 +1,6137 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as Long from "long"; +import * as $protobuf from "protobufjs"; +/** Namespace google. */ +export namespace google { + + /** Namespace cloud. */ + namespace cloud { + + /** Namespace bigquery. 
*/ + namespace bigquery { + + /** Namespace storage. */ + namespace storage { + + /** Namespace v1beta1. */ + namespace v1beta1 { + + /** Properties of an ArrowSchema. */ + interface IArrowSchema { + + /** ArrowSchema serializedSchema */ + serializedSchema?: (Uint8Array|string|null); + } + + /** Represents an ArrowSchema. */ + class ArrowSchema implements IArrowSchema { + + /** + * Constructs a new ArrowSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); + + /** ArrowSchema serializedSchema. */ + public serializedSchema: (Uint8Array|string); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Verifies an ArrowSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @param message ArrowSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an ArrowRecordBatch. 
*/ + interface IArrowRecordBatch { + + /** ArrowRecordBatch serializedRecordBatch */ + serializedRecordBatch?: (Uint8Array|string|null); + + /** ArrowRecordBatch rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an ArrowRecordBatch. */ + class ArrowRecordBatch implements IArrowRecordBatch { + + /** + * Constructs a new ArrowRecordBatch. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); + + /** ArrowRecordBatch serializedRecordBatch. */ + public serializedRecordBatch: (Uint8Array|string); + + /** ArrowRecordBatch rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowRecordBatch instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Verifies an ArrowRecordBatch message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowRecordBatch + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * @param message ArrowRecordBatch + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowRecordBatch to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an AvroSchema. 
*/ + interface IAvroSchema { + + /** AvroSchema schema */ + schema?: (string|null); + } + + /** Represents an AvroSchema. */ + class AvroSchema implements IAvroSchema { + + /** + * Constructs a new AvroSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema); + + /** AvroSchema schema. */ + public schema: string; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Verifies an AvroSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @param message AvroSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an AvroRows. 
*/ + interface IAvroRows { + + /** AvroRows serializedBinaryRows */ + serializedBinaryRows?: (Uint8Array|string|null); + + /** AvroRows rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an AvroRows. */ + class AvroRows implements IAvroRows { + + /** + * Constructs a new AvroRows. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); + + /** AvroRows serializedBinaryRows. */ + public serializedBinaryRows: (Uint8Array|string); + + /** AvroRows rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new AvroRows instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroRows message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Verifies an AvroRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. + * @param message AvroRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableReadOptions. 
*/ + interface ITableReadOptions { + + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); + } + + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { + + /** + * Constructs a new TableReadOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions); + + /** TableReadOptions selectedFields. */ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. */ + public rowRestriction: string; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReadOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Verifies a TableReadOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReadOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * @param message TableReadOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReadOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Represents a BigQueryStorage */ + class BigQueryStorage extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryStorage service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryStorage service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryStorage; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadSession + */ + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback): void; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @returns Promise + */ + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): Promise; + + /** + * Calls ReadRows. 
+ * @param request ReadRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback): void; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @returns Promise + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): Promise; + + /** + * Calls BatchCreateReadSessionStreams. + * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + */ + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback): void; + + /** + * Calls BatchCreateReadSessionStreams. + * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @returns Promise + */ + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): Promise; + + /** + * Calls FinalizeStream. + * @param request FinalizeStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback): void; + + /** + * Calls FinalizeStream. + * @param request FinalizeStreamRequest message or plain object + * @returns Promise + */ + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): Promise; + + /** + * Calls SplitReadStream. 
+ * @param request SplitReadStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback): void; + + /** + * Calls SplitReadStream. + * @param request SplitReadStreamRequest message or plain object + * @returns Promise + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): Promise; + } + + namespace BigQueryStorage { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * @param error Error, if any + * @param [response] ReadSession + */ + type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. + * @param error Error, if any + * @param [response] ReadRowsResponse + */ + type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. + * @param error Error, if any + * @param [response] BatchCreateReadSessionStreamsResponse + */ + type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. 
+ * @param error Error, if any + * @param [response] Empty + */ + type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * @param error Error, if any + * @param [response] SplitReadStreamResponse + */ + type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) => void; + } + + /** Properties of a Stream. */ + interface IStream { + + /** Stream name */ + name?: (string|null); + } + + /** Represents a Stream. */ + class Stream implements IStream { + + /** + * Constructs a new Stream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStream); + + /** Stream name. */ + public name: string; + + /** + * Creates a new Stream instance using the specified properties. + * @param [properties] Properties to set + * @returns Stream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStream): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @param message Stream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
+ * @param message Stream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Stream message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Decodes a Stream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Verifies a Stream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Stream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Creates a plain object from a Stream message. Also converts values to other types if specified. 
+ * @param message Stream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Stream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Stream to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a StreamPosition. */ + interface IStreamPosition { + + /** StreamPosition stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** StreamPosition offset */ + offset?: (number|Long|string|null); + } + + /** Represents a StreamPosition. */ + class StreamPosition implements IStreamPosition { + + /** + * Constructs a new StreamPosition. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition); + + /** StreamPosition stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** StreamPosition offset. */ + public offset: (number|Long|string); + + /** + * Creates a new StreamPosition instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamPosition instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @param message StreamPosition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
+ * @param message StreamPosition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamPosition message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Verifies a StreamPosition message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamPosition + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. 
+ * @param message StreamPosition + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamPosition, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamPosition to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReadSession. */ + interface IReadSession { + + /** ReadSession name */ + name?: (string|null); + + /** ReadSession expireTime */ + expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + + /** ReadSession tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + } + + /** Represents a ReadSession. */ + class ReadSession implements IReadSession { + + /** + * Constructs a new ReadSession. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession); + + /** ReadSession name. */ + public name: string; + + /** ReadSession expireTime. */ + public expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams. 
*/ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** ReadSession tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy. */ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** ReadSession schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + + /** + * Creates a new ReadSession instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadSession instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadSession message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Verifies a ReadSession message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadSession + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @param message ReadSession + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadSession to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CreateReadSessionRequest. 
*/ + interface ICreateReadSessionRequest { + + /** CreateReadSessionRequest tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** CreateReadSessionRequest parent */ + parent?: (string|null); + + /** CreateReadSessionRequest tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** CreateReadSessionRequest requestedStreams */ + requestedStreams?: (number|null); + + /** CreateReadSessionRequest readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + + /** CreateReadSessionRequest format */ + format?: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat|null); + + /** CreateReadSessionRequest shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + } + + /** Represents a CreateReadSessionRequest. */ + class CreateReadSessionRequest implements ICreateReadSessionRequest { + + /** + * Constructs a new CreateReadSessionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest); + + /** CreateReadSessionRequest tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** CreateReadSessionRequest parent. */ + public parent: string; + + /** CreateReadSessionRequest tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** CreateReadSessionRequest requestedStreams. */ + public requestedStreams: number; + + /** CreateReadSessionRequest readOptions. */ + public readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + + /** CreateReadSessionRequest format. 
*/ + public format: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat); + + /** CreateReadSessionRequest shardingStrategy. */ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateReadSessionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Verifies a CreateReadSessionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateReadSessionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @param message CreateReadSessionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateReadSessionRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 3 + } + + /** ShardingStrategy enum. */ + enum ShardingStrategy { + SHARDING_STRATEGY_UNSPECIFIED = 0, + LIQUID = 1, + BALANCED = 2 + } + + /** Properties of a ReadRowsRequest. */ + interface IReadRowsRequest { + + /** ReadRowsRequest readPosition */ + readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + } + + /** Represents a ReadRowsRequest. */ + class ReadRowsRequest implements IReadRowsRequest { + + /** + * Constructs a new ReadRowsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest); + + /** ReadRowsRequest readPosition. */ + public readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
+ * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Verifies a ReadRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. 
+ * @param message ReadRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a StreamStatus. */ + interface IStreamStatus { + + /** StreamStatus estimatedRowCount */ + estimatedRowCount?: (number|Long|string|null); + + /** StreamStatus fractionConsumed */ + fractionConsumed?: (number|null); + + /** StreamStatus progress */ + progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable */ + isSplittable?: (boolean|null); + } + + /** Represents a StreamStatus. */ + class StreamStatus implements IStreamStatus { + + /** + * Constructs a new StreamStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus); + + /** StreamStatus estimatedRowCount. */ + public estimatedRowCount: (number|Long|string); + + /** StreamStatus fractionConsumed. */ + public fractionConsumed: number; + + /** StreamStatus progress. */ + public progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable. */ + public isSplittable: boolean; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Verifies a StreamStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns StreamStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. + * @param message StreamStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Progress. */ + interface IProgress { + + /** Progress atResponseStart */ + atResponseStart?: (number|null); + + /** Progress atResponseEnd */ + atResponseEnd?: (number|null); + } + + /** Represents a Progress. */ + class Progress implements IProgress { + + /** + * Constructs a new Progress. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IProgress); + + /** Progress atResponseStart. */ + public atResponseStart: number; + + /** Progress atResponseEnd. */ + public atResponseEnd: number; + + /** + * Creates a new Progress instance using the specified properties. + * @param [properties] Properties to set + * @returns Progress instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IProgress): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
+ * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Verifies a Progress message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns Progress + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @param message Progress + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Progress to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ThrottleStatus. */ + interface IThrottleStatus { + + /** ThrottleStatus throttlePercent */ + throttlePercent?: (number|null); + } + + /** Represents a ThrottleStatus. */ + class ThrottleStatus implements IThrottleStatus { + + /** + * Constructs a new ThrottleStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus); + + /** ThrottleStatus throttlePercent. */ + public throttlePercent: number; + + /** + * Creates a new ThrottleStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns ThrottleStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ThrottleStatus message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Verifies a ThrottleStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ThrottleStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. 
+ * @param message ThrottleStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ThrottleStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ThrottleStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReadRowsResponse. */ + interface IReadRowsResponse { + + /** ReadRowsResponse avroRows */ + avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch */ + arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount */ + rowCount?: (number|Long|string|null); + + /** ReadRowsResponse status */ + status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus */ + throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + } + + /** Represents a ReadRowsResponse. */ + class ReadRowsResponse implements IReadRowsResponse { + + /** + * Constructs a new ReadRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse); + + /** ReadRowsResponse avroRows. */ + public avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch. */ + public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount. */ + public rowCount: (number|Long|string); + + /** ReadRowsResponse status. */ + public status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus. */ + public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + + /** ReadRowsResponse rows. 
*/ + public rows?: ("avroRows"|"arrowRecordBatch"); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Verifies a ReadRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @param message ReadRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a BatchCreateReadSessionStreamsRequest. */ + interface IBatchCreateReadSessionStreamsRequest { + + /** BatchCreateReadSessionStreamsRequest session */ + session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams */ + requestedStreams?: (number|null); + } + + /** Represents a BatchCreateReadSessionStreamsRequest. */ + class BatchCreateReadSessionStreamsRequest implements IBatchCreateReadSessionStreamsRequest { + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest); + + /** BatchCreateReadSessionStreamsRequest session. */ + public session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams. */ + public requestedStreams: number; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
+ * @param message BatchCreateReadSessionStreamsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a BatchCreateReadSessionStreamsResponse. */ + interface IBatchCreateReadSessionStreamsResponse { + + /** BatchCreateReadSessionStreamsResponse streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + } + + /** Represents a BatchCreateReadSessionStreamsResponse. */ + class BatchCreateReadSessionStreamsResponse implements IBatchCreateReadSessionStreamsResponse { + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse); + + /** BatchCreateReadSessionStreamsResponse streams. */ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
+ * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. + * @param message BatchCreateReadSessionStreamsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FinalizeStreamRequest. */ + interface IFinalizeStreamRequest { + + /** FinalizeStreamRequest stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a FinalizeStreamRequest. */ + class FinalizeStreamRequest implements IFinalizeStreamRequest { + + /** + * Constructs a new FinalizeStreamRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest); + + /** FinalizeStreamRequest stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns FinalizeStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Verifies a FinalizeStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FinalizeStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. + * @param message FinalizeStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FinalizeStreamRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamRequest. */ + interface ISplitReadStreamRequest { + + /** SplitReadStreamRequest originalStream */ + originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction */ + fraction?: (number|null); + } + + /** Represents a SplitReadStreamRequest. */ + class SplitReadStreamRequest implements ISplitReadStreamRequest { + + /** + * Constructs a new SplitReadStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest); + + /** SplitReadStreamRequest originalStream. */ + public originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction. */ + public fraction: number; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. 
+ * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Verifies a SplitReadStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
+ * @param message SplitReadStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamResponse. */ + interface ISplitReadStreamResponse { + + /** SplitReadStreamResponse primaryStream */ + primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream */ + remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a SplitReadStreamResponse. */ + class SplitReadStreamResponse implements ISplitReadStreamResponse { + + /** + * Constructs a new SplitReadStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse); + + /** SplitReadStreamResponse primaryStream. */ + public primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream. */ + public remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Verifies a SplitReadStreamResponse message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * @param message SplitReadStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableReference. */ + interface ITableReference { + + /** TableReference projectId */ + projectId?: (string|null); + + /** TableReference datasetId */ + datasetId?: (string|null); + + /** TableReference tableId */ + tableId?: (string|null); + } + + /** Represents a TableReference. */ + class TableReference implements ITableReference { + + /** + * Constructs a new TableReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference); + + /** TableReference projectId. */ + public projectId: string; + + /** TableReference datasetId. */ + public datasetId: string; + + /** TableReference tableId. */ + public tableId: string; + + /** + * Creates a new TableReference instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableReference instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReference message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Verifies a TableReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReference + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. + * @param message TableReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReference to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableModifiers. */ + interface ITableModifiers { + + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { + + /** + * Constructs a new TableModifiers. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers); + + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new TableModifiers instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableModifiers to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + } + } + } + + /** Namespace api. */ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. */ + public rules: google.api.IHttpRule[]; + + /** Http fullyDecodeReservedExpansion. 
*/ + public fullyDecodeReservedExpansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a HttpRule. */ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. */ + public selector: string; + + /** HttpRule get. */ + public get: string; + + /** HttpRule put. */ + public put: string; + + /** HttpRule post. */ + public post: string; + + /** HttpRule delete. */ + public delete: string; + + /** HttpRule patch. 
*/ + public patch: string; + + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** FieldBehavior enum. */ + enum FieldBehavior { + FIELD_BEHAVIOR_UNSPECIFIED = 0, + OPTIONAL = 1, + REQUIRED = 2, + OUTPUT_ONLY = 3, + INPUT_ONLY = 4, + IMMUTABLE = 5 + } + + /** Properties of a ResourceDescriptor. */ + interface IResourceDescriptor { + + /** ResourceDescriptor type */ + type?: (string|null); + + /** ResourceDescriptor pattern */ + pattern?: (string[]|null); + + /** ResourceDescriptor nameField */ + nameField?: (string|null); + + /** ResourceDescriptor history */ + history?: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History|null); + + /** ResourceDescriptor plural */ + plural?: (string|null); + + /** ResourceDescriptor singular */ + singular?: (string|null); + } + + /** Represents a ResourceDescriptor. */ + class ResourceDescriptor implements IResourceDescriptor { + + /** + * Constructs a new ResourceDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceDescriptor); + + /** ResourceDescriptor type. 
*/ + public type: string; + + /** ResourceDescriptor pattern. */ + public pattern: string[]; + + /** ResourceDescriptor nameField. */ + public nameField: string; + + /** ResourceDescriptor history. */ + public history: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History); + + /** ResourceDescriptor plural. */ + public plural: string; + + /** ResourceDescriptor singular. */ + public singular: string; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceDescriptor instance + */ + public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; + + /** + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; + + /** + * Verifies a ResourceDescriptor message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceDescriptor + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; + + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @param message ResourceDescriptor + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceDescriptor to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace ResourceDescriptor { + + /** History enum. 
*/ + enum History { + HISTORY_UNSPECIFIED = 0, + ORIGINALLY_SINGLE_PATTERN = 1, + FUTURE_MULTI_PATTERN = 2 + } + } + + /** Properties of a ResourceReference. */ + interface IResourceReference { + + /** ResourceReference type */ + type?: (string|null); + + /** ResourceReference childType */ + childType?: (string|null); + } + + /** Represents a ResourceReference. */ + class ResourceReference implements IResourceReference { + + /** + * Constructs a new ResourceReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceReference); + + /** ResourceReference type. */ + public type: string; + + /** ResourceReference childType. */ + public childType: string; + + /** + * Creates a new ResourceReference instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceReference instance + */ + public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; + + /** + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceReference message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; + + /** + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; + + /** + * Verifies a ResourceReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceReference + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; + + /** + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @param message ResourceReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceReference to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. 
*/ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. */ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. */ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileDescriptorProto. 
*/ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. 
*/ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. */ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DescriptorProto. 
*/ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. */ + public name: string; + + /** DescriptorProto field. */ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. 
*/ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. 
*/ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. */ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldDescriptorProto. 
*/ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label); + + /** FieldDescriptorProto type. */ + public type: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type); + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. */ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. 
*/ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldDescriptorProto { + + /** Type enum. */ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. */ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. 
*/ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumDescriptorProto. 
*/ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. */ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. */ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
+ * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. */ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
+ * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. 
Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceDescriptorProto. */ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. */ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. + * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodDescriptorProto. */ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. 
*/ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileOptions. 
*/ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FileOptions .google.api.resourceDefinition */ + ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode); + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. */ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. */ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FileOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FileOptions instance + */ + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + + /** + * Verifies a FileOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. + * @param message FileOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FileOptions { + + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } + + /** Properties of a MessageOptions. */ + interface IMessageOptions { + + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); + + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); + + /** MessageOptions deprecated */ + deprecated?: (boolean|null); + + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); + + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MessageOptions .google.api.resource */ + ".google.api.resource"?: (google.api.IResourceDescriptor|null); + } + + /** Represents a MessageOptions. */ + class MessageOptions implements IMessageOptions { + + /** + * Constructs a new MessageOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMessageOptions); + + /** MessageOptions messageSetWireFormat. 
*/ + public messageSetWireFormat: boolean; + + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; + + /** MessageOptions deprecated. */ + public deprecated: boolean; + + /** MessageOptions mapEntry. */ + public mapEntry: boolean; + + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MessageOptions instance + */ + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + + /** + * Verifies a MessageOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MessageOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MessageOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldOptions. 
*/ + interface IFieldOptions { + + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType|null); + + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FieldOptions .google.api.fieldBehavior */ + ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); + + /** FieldOptions .google.api.resourceReference */ + ".google.api.resourceReference"?: (google.api.IResourceReference|null); + } + + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { + + /** + * Constructs a new FieldOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldOptions); + + /** FieldOptions ctype. */ + public ctype: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType); + + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. */ + public jstype: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType); + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FieldOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FieldOptions instance + */ + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + + /** + * Verifies a FieldOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldOptions { + + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } + + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } + + /** Properties of an OneofOptions. */ + interface IOneofOptions { + + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { + + /** + * Constructs a new OneofOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofOptions); + + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofOptions instance + */ + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + + /** + * Encodes the specified OneofOptions message. 
Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + + /** + * Verifies an OneofOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns OneofOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * @param message OneofOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumOptions. */ + interface IEnumOptions { + + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); + + /** EnumOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { + + /** + * Constructs a new EnumOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumOptions); + + /** EnumOptions allowAlias. */ + public allowAlias: boolean; + + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumOptions instance + */ + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + + /** + * Verifies an EnumOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns EnumOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { + + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { + + /** + * Constructs a new EnumValueOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueOptions); + + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueOptions instance + */ + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + + /** + * Verifies an EnumValueOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns EnumValueOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * @param message EnumValueOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceOptions. */ + interface IServiceOptions { + + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); + + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); + + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); + } + + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { + + /** + * Constructs a new ServiceOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceOptions); + + /** ServiceOptions deprecated. */ + public deprecated: boolean; + + /** ServiceOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ServiceOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceOptions instance + */ + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
+ * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + + /** + * Verifies a ServiceOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ServiceOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + + /** + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * @param message ServiceOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodOptions. */ + interface IMethodOptions { + + /** MethodOptions deprecated */ + deprecated?: (boolean|null); + + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); + + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); + + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } + + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { + + /** + * Constructs a new MethodOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); + + /** MethodOptions deprecated. */ + public deprecated: boolean; + + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); + + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MethodOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns MethodOptions instance + */ + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + + /** + * Verifies a MethodOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace MethodOptions { + + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 + } + } + + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { + + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); + + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|string|null); + + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|string|null); + + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); + + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|string|null); + + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); + } + + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { + + /** + * Constructs a new UninterpretedOption. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUninterpretedOption); + + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long|string); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long|string); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. */ + public stringValue: (Uint8Array|string); + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @param [properties] Properties to set + * @returns UninterpretedOption instance + */ + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + + /** + * Verifies an UninterpretedOption message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UninterpretedOption + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @param message UninterpretedOption + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UninterpretedOption to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace UninterpretedOption { + + /** Properties of a NamePart. 
*/ + interface INamePart { + + /** NamePart namePart */ + namePart: string; + + /** NamePart isExtension */ + isExtension: boolean; + } + + /** Represents a NamePart. */ + class NamePart implements INamePart { + + /** + * Constructs a new NamePart. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); + + /** NamePart namePart. */ + public namePart: string; + + /** NamePart isExtension. */ + public isExtension: boolean; + + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NamePart message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; + + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a SourceCodeInfo. 
*/ + interface ISourceCodeInfo { + + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } + + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { + + /** + * Constructs a new SourceCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); + + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SourceCodeInfo instance + */ + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + + /** + * Verifies a SourceCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SourceCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SourceCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace SourceCodeInfo { + + /** Properties of a Location. 
*/ + interface ILocation { + + /** Location path */ + path?: (number[]|null); + + /** Location span */ + span?: (number[]|null); + + /** Location leadingComments */ + leadingComments?: (string|null); + + /** Location trailingComments */ + trailingComments?: (string|null); + + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); + + /** Location path. */ + public path: number[]; + + /** Location span. */ + public span: number[]; + + /** Location leadingComments. */ + public leadingComments: string; + + /** Location trailingComments. */ + public trailingComments: string; + + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. 
+ * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { + + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); + } + + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { + + /** + * Constructs a new GeneratedCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IGeneratedCodeInfo); + + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns GeneratedCodeInfo instance + */ + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + + /** + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
+ * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + + /** + * Verifies a GeneratedCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GeneratedCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
+ * @param message GeneratedCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace GeneratedCodeInfo { + + /** Properties of an Annotation. */ + interface IAnnotation { + + /** Annotation path */ + path?: (number[]|null); + + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ + end?: (number|null); + } + + /** Represents an Annotation. */ + class Annotation implements IAnnotation { + + /** + * Constructs a new Annotation. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); + + /** Annotation path. */ + public path: number[]; + + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ + public end: number; + + /** + * Creates a new Annotation instance using the specified properties. + * @param [properties] Properties to set + * @returns Annotation instance + */ + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Annotation message, length delimited. 
Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Verifies an Annotation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Annotation + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. 
+ * @param message Annotation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Annotation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a Timestamp. */ + interface ITimestamp { + + /** Timestamp seconds */ + seconds?: (number|Long|string|null); + + /** Timestamp nanos */ + nanos?: (number|null); + } + + /** Represents a Timestamp. */ + class Timestamp implements ITimestamp { + + /** + * Constructs a new Timestamp. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ITimestamp); + + /** Timestamp seconds. */ + public seconds: (number|Long|string); + + /** Timestamp nanos. */ + public nanos: number; + + /** + * Creates a new Timestamp instance using the specified properties. + * @param [properties] Properties to set + * @returns Timestamp instance + */ + public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Timestamp message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; + + /** + * Verifies a Timestamp message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Timestamp + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @param message Timestamp + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Timestamp to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an Empty. */ + interface IEmpty { + } + + /** Represents an Empty. */ + class Empty implements IEmpty { + + /** + * Constructs a new Empty. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEmpty); + + /** + * Creates a new Empty instance using the specified properties. + * @param [properties] Properties to set + * @returns Empty instance + */ + public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; + + /** + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Empty message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; + + /** + * Decodes an Empty message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; + + /** + * Verifies an Empty message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Empty + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; + + /** + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @param message Empty + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Empty to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js new file mode 100644 index 00000000000..8da21884529 --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.js @@ -0,0 +1,15841 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/ +(function(global, factory) { /* global define, require, module */ + + /* AMD */ if (typeof define === 'function' && define.amd) + define(["protobufjs/minimal"], factory); + + /* CommonJS */ else if (typeof require === 'function' && typeof module === 'object' && module && module.exports) + module.exports = factory(require("protobufjs/minimal")); + +})(this, function($protobuf) { + "use strict"; + + // Common aliases + var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + + // Exported root namespace + var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + + $root.google = (function() { + + /** + * Namespace google. + * @exports google + * @namespace + */ + var google = {}; + + google.cloud = (function() { + + /** + * Namespace cloud. + * @memberof google + * @namespace + */ + var cloud = {}; + + cloud.bigquery = (function() { + + /** + * Namespace bigquery. + * @memberof google.cloud + * @namespace + */ + var bigquery = {}; + + bigquery.storage = (function() { + + /** + * Namespace storage. + * @memberof google.cloud.bigquery + * @namespace + */ + var storage = {}; + + storage.v1beta1 = (function() { + + /** + * Namespace v1beta1. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1beta1 = {}; + + v1beta1.ArrowSchema = (function() { + + /** + * Properties of an ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IArrowSchema + * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema + */ + + /** + * Constructs a new ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an ArrowSchema. 
+ * @implements IArrowSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + */ + function ArrowSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSchema serializedSchema. + * @member {Uint8Array} serializedSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @instance + */ + ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema instance + */ + ArrowSchema.create = function create(properties) { + return new ArrowSchema(properties); + }; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); + return writer; + }; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedSchema = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) + return "serializedSchema: buffer expected"; + return null; + }; + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + */ + ArrowSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + if (object.serializedSchema != null) + if (typeof object.serializedSchema === "string") + $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); + else if (object.serializedSchema.length) + message.serializedSchema = object.serializedSchema; + return message; + }; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ArrowSchema} message ArrowSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if (options.bytes === String) + object.serializedSchema = ""; + else { + object.serializedSchema = []; + if (options.bytes !== Array) + object.serializedSchema = $util.newBuffer(object.serializedSchema); + } + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; + return object; + }; + + /** + * Converts this ArrowSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @instance + * @returns {Object.} JSON object + */ + ArrowSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ArrowSchema; + })(); + + v1beta1.ArrowRecordBatch = (function() { + + /** + * Properties of an ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IArrowRecordBatch + * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch + * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount + */ + + /** + * Constructs a new ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an ArrowRecordBatch. + * @implements IArrowRecordBatch + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + */ + function ArrowRecordBatch(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowRecordBatch serializedRecordBatch. + * @member {Uint8Array} serializedRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); + + /** + * ArrowRecordBatch rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch instance + */ + ArrowRecordBatch.create = function create(properties) { + return new ArrowRecordBatch(properties); + }; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedRecordBatch = reader.bytes(); + break; + case 2: + message.rowCount = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowRecordBatch message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowRecordBatch.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) + return "serializedRecordBatch: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + */ + ArrowRecordBatch.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + if (object.serializedRecordBatch != null) + if (typeof object.serializedRecordBatch === "string") + $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); + else if (object.serializedRecordBatch.length) + message.serializedRecordBatch = object.serializedRecordBatch; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} message ArrowRecordBatch + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowRecordBatch.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedRecordBatch = ""; + else { + object.serializedRecordBatch = []; + if (options.bytes !== Array) + object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ArrowRecordBatch to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + * @returns {Object.} JSON object + */ + ArrowRecordBatch.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ArrowRecordBatch; + })(); + + v1beta1.AvroSchema = (function() { + + /** + * Properties of an AvroSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IAvroSchema + * @property {string|null} [schema] AvroSchema schema + */ + + /** + * Constructs a new AvroSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an AvroSchema. + * @implements IAvroSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + */ + function AvroSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSchema schema. + * @member {string} schema + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @instance + */ + AvroSchema.prototype.schema = ""; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema instance + */ + AvroSchema.create = function create(properties) { + return new AvroSchema(properties); + }; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.schema != null && message.hasOwnProperty("schema")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); + return writer; + }; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.schema = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.schema != null && message.hasOwnProperty("schema")) + if (!$util.isString(message.schema)) + return "schema: string expected"; + return null; + }; + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + */ + AvroSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + if (object.schema != null) + message.schema = String(object.schema); + return message; + }; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.AvroSchema} message AvroSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.schema = ""; + if (message.schema != null && message.hasOwnProperty("schema")) + object.schema = message.schema; + return object; + }; + + /** + * Converts this AvroSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @instance + * @returns {Object.} JSON object + */ + AvroSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return AvroSchema; + })(); + + v1beta1.AvroRows = (function() { + + /** + * Properties of an AvroRows. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IAvroRows + * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows + * @property {number|Long|null} [rowCount] AvroRows rowCount + */ + + /** + * Constructs a new AvroRows. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an AvroRows. + * @implements IAvroRows + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + */ + function AvroRows(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroRows serializedBinaryRows. + * @member {Uint8Array} serializedBinaryRows + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + */ + AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); + + /** + * AvroRows rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + */ + AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new AvroRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows instance + */ + AvroRows.create = function create(properties) { + return new AvroRows(properties); + }; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedBinaryRows = reader.bytes(); + break; + case 2: + message.rowCount = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroRows message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) + return "serializedBinaryRows: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + */ + AvroRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + if (object.serializedBinaryRows != null) + if (typeof object.serializedBinaryRows === "string") + $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); + else if (object.serializedBinaryRows.length) + message.serializedBinaryRows = object.serializedBinaryRows; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.AvroRows} message AvroRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedBinaryRows = ""; + else { + object.serializedBinaryRows = []; + if (options.bytes !== Array) + object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this AvroRows to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + * @returns {Object.} JSON object + */ + AvroRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return AvroRows; + })(); + + v1beta1.TableReadOptions = (function() { + + /** + * Properties of a TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + */ + + /** + * Constructs a new TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableReadOptions. + * @implements ITableReadOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + */ + function TableReadOptions(properties) { + this.selectedFields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReadOptions selectedFields. + * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + */ + TableReadOptions.prototype.selectedFields = $util.emptyArray; + + /** + * TableReadOptions rowRestriction. + * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; + + /** + * Creates a new TableReadOptions instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions instance + */ + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); + }; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + return writer; + }; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + case 2: + message.rowRestriction = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReadOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReadOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; + return null; + }; + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + */ + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); + } + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); + return message; + }; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} message TableReadOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReadOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; + return object; + }; + + /** + * Converts this TableReadOptions to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + * @returns {Object.} JSON object + */ + TableReadOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableReadOptions; + })(); + + v1beta1.BigQueryStorage = (function() { + + /** + * Constructs a new BigQueryStorage service. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BigQueryStorage + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryStorage(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryStorage.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryStorage; + + /** + * Creates new BigQueryStorage service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryStorage} RPC service. Useful where requests and/or responses are streamed. + */ + BigQueryStorage.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef CreateReadSessionCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} [response] ReadSession + */ + + /** + * Calls CreateReadSession. 
+ * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.createReadSession = function createReadSession(request, callback) { + return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadSession, request, callback); + }, "name", { value: "CreateReadSession" }); + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef ReadRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} [response] ReadRowsResponse + */ + + /** + * Calls ReadRows. 
+ * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.readRows = function readRows(request, callback) { + return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, request, callback); + }, "name", { value: "ReadRows" }); + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef BatchCreateReadSessionStreamsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} [response] BatchCreateReadSessionStreamsResponse + */ + + /** + * Calls BatchCreateReadSessionStreams. 
+ * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback} callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.batchCreateReadSessionStreams = function batchCreateReadSessionStreams(request, callback) { + return this.rpcCall(batchCreateReadSessionStreams, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, request, callback); + }, "name", { value: "BatchCreateReadSessionStreams" }); + + /** + * Calls BatchCreateReadSessionStreams. + * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef FinalizeStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.protobuf.Empty} [response] Empty + */ + + /** + * Calls FinalizeStream. 
+ * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback} callback Node-style callback called with the error, if any, and Empty + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.finalizeStream = function finalizeStream(request, callback) { + return this.rpcCall(finalizeStream, $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, $root.google.protobuf.Empty, request, callback); + }, "name", { value: "FinalizeStream" }); + + /** + * Calls FinalizeStream. + * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef SplitReadStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} [response] SplitReadStreamResponse + */ + + /** + * Calls SplitReadStream. 
+ * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.splitReadStream = function splitReadStream(request, callback) { + return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, request, callback); + }, "name", { value: "SplitReadStream" }); + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryStorage; + })(); + + v1beta1.Stream = (function() { + + /** + * Properties of a Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStream + * @property {string|null} [name] Stream name + */ + + /** + * Constructs a new Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Stream. + * @implements IStream + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + */ + function Stream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Stream name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @instance + */ + Stream.prototype.name = ""; + + /** + * Creates a new Stream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream instance + */ + Stream.create = function create(properties) { + return new Stream(properties); + }; + + /** + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Stream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Stream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Stream message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Stream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Stream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Stream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Stream message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Stream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + */ + Stream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Stream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a Stream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.Stream} message Stream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Stream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this Stream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @instance + * @returns {Object.} JSON object + */ + Stream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Stream; + })(); + + v1beta1.StreamPosition = (function() { + + /** + * Properties of a StreamPosition. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStreamPosition + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] StreamPosition stream + * @property {number|Long|null} [offset] StreamPosition offset + */ + + /** + * Constructs a new StreamPosition. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a StreamPosition. + * @implements IStreamPosition + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + */ + function StreamPosition(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamPosition stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + */ + StreamPosition.prototype.stream = null; + + /** + * StreamPosition offset. + * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + */ + StreamPosition.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new StreamPosition instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition instance + */ + StreamPosition.create = function create(properties) { + return new StreamPosition(properties); + }; + + /** + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamPosition.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stream != null && message.hasOwnProperty("stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.offset != null && message.hasOwnProperty("offset")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); + return writer; + }; + + /** + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamPosition.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamPosition message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamPosition.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + case 2: + message.offset = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamPosition.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamPosition message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamPosition.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + if (error) + return "stream." + error; + } + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; + + /** + * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + */ + StreamPosition.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamPosition) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamPosition.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + } + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} message StreamPosition + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamPosition.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.stream = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; + } + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; + + /** + * Converts this StreamPosition to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + * @returns {Object.} JSON object + */ + StreamPosition.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return StreamPosition; + })(); + + v1beta1.ReadSession = (function() { + + /** + * Properties of a ReadSession. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {Array.|null} [streams] ReadSession streams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] ReadSession tableReference + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] ReadSession shardingStrategy + */ + + /** + * Constructs a new ReadSession. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadSession. + * @implements IReadSession + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set + */ + function ReadSession(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.name = ""; + + /** + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.expireTime = null; + + /** + * ReadSession avroSchema. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + /** + * ReadSession tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableReference = null; + + /** + * ReadSession tableModifiers. + * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.shardingStrategy = 0; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession instance + */ + ReadSession.create = function create(properties) { + return new ReadSession(properties); + }; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) + $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.tableReference != null 
&& message.hasOwnProperty("tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); + return writer; + }; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 5: + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); + break; + case 6: + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); + break; + case 4: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + case 7: + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + case 8: + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + case 9: + message.shardingStrategy = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadSession message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadSession.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); + if (error) + return "expireTime." + error; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (error) + return "tableReference." 
+ error; + } + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + */ + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadSession) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.expireTime: object expected"); + message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = 
$root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + switch (object.shardingStrategy) { + case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case "BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} message ReadSession + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadSession.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (options.defaults) { + object.name = ""; + object.expireTime = null; + object.tableReference = null; + object.tableModifiers = null; + object.shardingStrategy = options.enums === String ? "SHARDING_STRATEGY_UNSPECIFIED" : 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && 
message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + return object; + }; + + /** + * Converts this ReadSession to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + * @returns {Object.} JSON object + */ + ReadSession.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadSession; + })(); + + v1beta1.CreateReadSessionRequest = (function() { + + /** + * Properties of a CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ICreateReadSessionRequest + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] CreateReadSessionRequest tableReference + * @property {string|null} [parent] CreateReadSessionRequest parent + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] CreateReadSessionRequest tableModifiers + * @property {number|null} [requestedStreams] CreateReadSessionRequest requestedStreams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null} [readOptions] CreateReadSessionRequest readOptions + * @property {google.cloud.bigquery.storage.v1beta1.DataFormat|null} [format] CreateReadSessionRequest format + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] CreateReadSessionRequest shardingStrategy + */ + + /** + * Constructs a new CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a CreateReadSessionRequest. 
+ * @implements ICreateReadSessionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + */ + function CreateReadSessionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateReadSessionRequest tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.tableReference = null; + + /** + * CreateReadSessionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.parent = ""; + + /** + * CreateReadSessionRequest tableModifiers. + * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.tableModifiers = null; + + /** + * CreateReadSessionRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.requestedStreams = 0; + + /** + * CreateReadSessionRequest readOptions. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.readOptions = null; + + /** + * CreateReadSessionRequest format. 
+ * @member {google.cloud.bigquery.storage.v1beta1.DataFormat} format + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.format = 0; + + /** + * CreateReadSessionRequest shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.shardingStrategy = 0; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest instance + */ + CreateReadSessionRequest.create = function create(properties) { + return new CreateReadSessionRequest(properties); + }; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.format != null && message.hasOwnProperty("format")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); + if (message.parent != null && message.hasOwnProperty("parent")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); + return writer; + }; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + case 6: + message.parent = reader.string(); + break; + case 2: + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + case 3: + message.requestedStreams = reader.int32(); + break; + case 4: + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); + break; + case 5: + message.format = reader.int32(); + break; + case 7: + message.shardingStrategy = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateReadSessionRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateReadSessionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (error) + return "tableReference." + error; + } + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.format != null && message.hasOwnProperty("format")) + switch (message.format) { + default: + return "format: enum value expected"; + case 0: + case 1: + case 3: + break; + } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates a CreateReadSessionRequest message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + */ + CreateReadSessionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.parent != null) + message.parent = String(object.parent); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + if (object.readOptions != null) { + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); + } + switch (object.format) { + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.format = 0; + break; + case "AVRO": + case 1: + message.format = 1; + break; + case "ARROW": + case 3: + message.format = 3; + break; + } + switch (object.shardingStrategy) { + 
case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case "BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} message CreateReadSessionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateReadSessionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.tableReference = null; + object.tableModifiers = null; + object.requestedStreams = 0; + object.readOptions = null; + object.format = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; + object.parent = ""; + object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); + if (message.format != null && message.hasOwnProperty("format")) + object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + return object; + }; + + /** + * Converts this CreateReadSessionRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateReadSessionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return CreateReadSessionRequest; + })(); + + /** + * DataFormat enum. 
+ * @name google.cloud.bigquery.storage.v1beta1.DataFormat + * @enum {string} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=3 ARROW value + */ + v1beta1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[3] = "ARROW"] = 3; + return values; + })(); + + /** + * ShardingStrategy enum. + * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy + * @enum {string} + * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value + * @property {number} LIQUID=1 LIQUID value + * @property {number} BALANCED=2 BALANCED value + */ + v1beta1.ShardingStrategy = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; + values[valuesById[1] = "LIQUID"] = 1; + values[valuesById[2] = "BALANCED"] = 2; + return values; + })(); + + v1beta1.ReadRowsRequest = (function() { + + /** + * Properties of a ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null} [readPosition] ReadRowsRequest readPosition + */ + + /** + * Constructs a new ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsRequest. + * @implements IReadRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + */ + function ReadRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsRequest readPosition. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null|undefined} readPosition + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.readPosition = null; + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest instance + */ + ReadRowsRequest.create = function create(properties) { + return new ReadRowsRequest(properties); + }; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.readPosition != null && message.hasOwnProperty("readPosition")) + $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.verify(message.readPosition); + if (error) + return "readPosition." + error; + } + return null; + }; + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + */ + ReadRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + if (object.readPosition != null) { + if (typeof object.readPosition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.readPosition: object expected"); + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.fromObject(object.readPosition); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} message ReadRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.readPosition = null; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) + object.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.toObject(message.readPosition, options); + return object; + }; + + /** + * Converts this ReadRowsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + * @returns {Object.} JSON object + */ + ReadRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsRequest; + })(); + + v1beta1.StreamStatus = (function() { + + /** + * Properties of a StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStreamStatus + * @property {number|Long|null} [estimatedRowCount] StreamStatus estimatedRowCount + * @property {number|null} [fractionConsumed] StreamStatus fractionConsumed + * @property {google.cloud.bigquery.storage.v1beta1.IProgress|null} [progress] StreamStatus progress + * @property {boolean|null} [isSplittable] StreamStatus isSplittable + */ + + /** + * Constructs a new StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a StreamStatus. + * @implements IStreamStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + */ + function StreamStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamStatus estimatedRowCount. + * @member {number|Long} estimatedRowCount + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * StreamStatus fractionConsumed. + * @member {number} fractionConsumed + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.fractionConsumed = 0; + + /** + * StreamStatus progress. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IProgress|null|undefined} progress + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.progress = null; + + /** + * StreamStatus isSplittable. + * @member {boolean} isSplittable + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.isSplittable = false; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus instance + */ + StreamStatus.create = function create(properties) { + return new StreamStatus(properties); + }; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); + if (message.progress != null && message.hasOwnProperty("progress")) + $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.estimatedRowCount = reader.int64(); + break; + case 2: + message.fractionConsumed = reader.float(); + break; + case 4: + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); + break; + case 3: + message.isSplittable = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamStatus message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) + return "estimatedRowCount: integer|Long expected"; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + if (typeof message.fractionConsumed !== "number") + return "fractionConsumed: number expected"; + if (message.progress != null && message.hasOwnProperty("progress")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Progress.verify(message.progress); + if (error) + return "progress." + error; + } + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + if (typeof message.isSplittable !== "boolean") + return "isSplittable: boolean expected"; + return null; + }; + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + */ + StreamStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + if (object.estimatedRowCount != null) + if ($util.Long) + (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; + else if (typeof object.estimatedRowCount === "string") + message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); + else if (typeof object.estimatedRowCount === "number") + message.estimatedRowCount = object.estimatedRowCount; + else if (typeof object.estimatedRowCount === "object") + message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); + if (object.fractionConsumed != null) + message.fractionConsumed = Number(object.fractionConsumed); + if (object.progress != null) { + if (typeof object.progress !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamStatus.progress: object expected"); + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.fromObject(object.progress); + } + if (object.isSplittable != null) + message.isSplittable = Boolean(object.isSplittable); + return message; + }; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.StreamStatus} message StreamStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedRowCount = options.longs === String ? "0" : 0; + object.fractionConsumed = 0; + object.isSplittable = false; + object.progress = null; + } + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (typeof message.estimatedRowCount === "number") + object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; + else + object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + object.fractionConsumed = options.json && !isFinite(message.fractionConsumed) ? String(message.fractionConsumed) : message.fractionConsumed; + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + object.isSplittable = message.isSplittable; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.toObject(message.progress, options); + return object; + }; + + /** + * Converts this StreamStatus to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + * @returns {Object.} JSON object + */ + StreamStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return StreamStatus; + })(); + + v1beta1.Progress = (function() { + + /** + * Properties of a Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IProgress + * @property {number|null} [atResponseStart] Progress atResponseStart + * @property {number|null} [atResponseEnd] Progress atResponseEnd + */ + + /** + * Constructs a new Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Progress. + * @implements IProgress + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + */ + function Progress(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Progress atResponseStart. + * @member {number} atResponseStart + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseStart = 0; + + /** + * Progress atResponseEnd. + * @member {number} atResponseEnd + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseEnd = 0; + + /** + * Creates a new Progress instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress instance + */ + Progress.create = function create(properties) { + return new Progress(properties); + }; + + /** + * Encodes the specified Progress message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); + return writer; + }; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Progress message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.atResponseStart = reader.float(); + break; + case 2: + message.atResponseEnd = reader.float(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Progress message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Progress.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (typeof message.atResponseStart !== "number") + return "atResponseStart: number expected"; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (typeof message.atResponseEnd !== "number") + return "atResponseEnd: number expected"; + return null; + }; + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + */ + Progress.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Progress) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + if (object.atResponseStart != null) + message.atResponseStart = Number(object.atResponseStart); + if (object.atResponseEnd != null) + message.atResponseEnd = Number(object.atResponseEnd); + return message; + }; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.Progress} message Progress + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Progress.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.atResponseStart = 0; + object.atResponseEnd = 0; + } + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + return object; + }; + + /** + * Converts this Progress to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + * @returns {Object.} JSON object + */ + Progress.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Progress; + })(); + + v1beta1.ThrottleStatus = (function() { + + /** + * Properties of a ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IThrottleStatus + * @property {number|null} [throttlePercent] ThrottleStatus throttlePercent + */ + + /** + * Constructs a new ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ThrottleStatus. 
+ * @implements IThrottleStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + */ + function ThrottleStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ThrottleStatus throttlePercent. + * @member {number} throttlePercent + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + */ + ThrottleStatus.prototype.throttlePercent = 0; + + /** + * Creates a new ThrottleStatus instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus instance + */ + ThrottleStatus.create = function create(properties) { + return new ThrottleStatus(properties); + }; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + return writer; + }; + + /** + * Encodes the specified ThrottleStatus message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.throttlePercent = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ThrottleStatus message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ThrottleStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (!$util.isInteger(message.throttlePercent)) + return "throttlePercent: integer expected"; + return null; + }; + + /** + * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + */ + ThrottleStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + if (object.throttlePercent != null) + message.throttlePercent = object.throttlePercent | 0; + return message; + }; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} message ThrottleStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ThrottleStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.throttlePercent = 0; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + object.throttlePercent = message.throttlePercent; + return object; + }; + + /** + * Converts this ThrottleStatus to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + * @returns {Object.} JSON object + */ + ThrottleStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ThrottleStatus; + })(); + + v1beta1.ReadRowsResponse = (function() { + + /** + * Properties of a ReadRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsResponse + * @property {google.cloud.bigquery.storage.v1beta1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows + * @property {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch + * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount + * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status + * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus + */ + + /** + * Constructs a new ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsResponse. + * @implements IReadRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + */ + function ReadRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsResponse avroRows. + * @member {google.cloud.bigquery.storage.v1beta1.IAvroRows|null|undefined} avroRows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroRows = null; + + /** + * ReadRowsResponse arrowRecordBatch. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null|undefined} arrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowRecordBatch = null; + + /** + * ReadRowsResponse rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadRowsResponse status. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null|undefined} status + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.status = null; + + /** + * ReadRowsResponse throttleStatus. + * @member {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null|undefined} throttleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.throttleStatus = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadRowsResponse rows. + * @member {"avroRows"|"arrowRecordBatch"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse instance + */ + ReadRowsResponse.create = function create(properties) { + return new ReadRowsResponse(properties); + }; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.status != null && message.hasOwnProperty("status")) + $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) + $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) + $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); + break; + case 4: + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + case 6: + message.rowCount = reader.int64(); + break; + case 2: + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); + break; + case 5: + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.verify(message.avroRows); + if (error) + return "avroRows." + error; + } + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify(message.arrowRecordBatch); + if (error) + return "arrowRecordBatch." + error; + } + } + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + if (message.status != null && message.hasOwnProperty("status")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.verify(message.status); + if (error) + return "status." + error; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify(message.throttleStatus); + if (error) + return "throttleStatus." + error; + } + return null; + }; + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + */ + ReadRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + if (object.avroRows != null) { + if (typeof object.avroRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroRows: object expected"); + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.fromObject(object.avroRows); + } + if (object.arrowRecordBatch != null) { + if (typeof object.arrowRecordBatch !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowRecordBatch: object expected"); + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); + } + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + if (object.status != null) { + if (typeof object.status !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status: object expected"); + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.fromObject(object.status); + } + if (object.throttleStatus != null) { + if (typeof object.throttleStatus !== "object") + throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} message ReadRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.status = null; + object.throttleStatus = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? 
"0" : 0; + } + if (message.status != null && message.hasOwnProperty("status")) + object.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.toObject(message.status, options); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + object.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.toObject(message.avroRows, options); + if (options.oneofs) + object.rows = "avroRows"; + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); + if (options.oneofs) + object.rows = "arrowRecordBatch"; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) + object.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.toObject(message.throttleStatus, options); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ReadRowsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + * @returns {Object.} JSON object + */ + ReadRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsResponse; + })(); + + v1beta1.BatchCreateReadSessionStreamsRequest = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsRequest. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IReadSession|null} [session] BatchCreateReadSessionStreamsRequest session + * @property {number|null} [requestedStreams] BatchCreateReadSessionStreamsRequest requestedStreams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsRequest. + * @implements IBatchCreateReadSessionStreamsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsRequest session. + * @member {google.cloud.bigquery.storage.v1beta1.IReadSession|null|undefined} session + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.session = null; + + /** + * BatchCreateReadSessionStreamsRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.requestedStreams = 0; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest instance + */ + BatchCreateReadSessionStreamsRequest.create = function create(properties) { + return new BatchCreateReadSessionStreamsRequest(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.session != null && message.hasOwnProperty("session")) + $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); + break; + case 2: + message.requestedStreams = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.session != null && message.hasOwnProperty("session")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.verify(message.session); + if (error) + return "session." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + */ + BatchCreateReadSessionStreamsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + if (object.session != null) { + if (typeof object.session !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session: object expected"); + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.fromObject(object.session); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.session = null; + object.requestedStreams = 0; + } + if (message.session != null && message.hasOwnProperty("session")) + object.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.toObject(message.session, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return BatchCreateReadSessionStreamsRequest; + })(); + + v1beta1.BatchCreateReadSessionStreamsResponse = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsResponse + * @property {Array.|null} [streams] BatchCreateReadSessionStreamsResponse streams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsResponse. 
+ * @implements IBatchCreateReadSessionStreamsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsResponse(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsResponse streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + */ + BatchCreateReadSessionStreamsResponse.prototype.streams = $util.emptyArray; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse instance + */ + BatchCreateReadSessionStreamsResponse.create = function create(properties) { + return new BatchCreateReadSessionStreamsResponse(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + */ + BatchCreateReadSessionStreamsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return BatchCreateReadSessionStreamsResponse; + })(); + + v1beta1.FinalizeStreamRequest = (function() { + + /** + * Properties of a FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IFinalizeStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] FinalizeStreamRequest stream + */ + + /** + * Constructs a new FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a FinalizeStreamRequest. 
+ * @implements IFinalizeStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + */ + function FinalizeStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FinalizeStreamRequest stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + */ + FinalizeStreamRequest.prototype.stream = null; + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest instance + */ + FinalizeStreamRequest.create = function create(properties) { + return new FinalizeStreamRequest(properties); + }; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stream != null && message.hasOwnProperty("stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FinalizeStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FinalizeStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + if (error) + return "stream." + error; + } + return null; + }; + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + */ + FinalizeStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + } + return message; + }; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} message FinalizeStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FinalizeStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.stream = null; + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + return object; + }; + + /** + * Converts this FinalizeStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + * @returns {Object.} JSON object + */ + FinalizeStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FinalizeStreamRequest; + })(); + + v1beta1.SplitReadStreamRequest = (function() { + + /** + * Properties of a SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [originalStream] SplitReadStreamRequest originalStream + * @property {number|null} [fraction] SplitReadStreamRequest fraction + */ + + /** + * Constructs a new SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamRequest. 
+ * @implements ISplitReadStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + */ + function SplitReadStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamRequest originalStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} originalStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.originalStream = null; + + /** + * SplitReadStreamRequest fraction. + * @member {number} fraction + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.fraction = 0; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest instance + */ + SplitReadStreamRequest.create = function create(properties) { + return new SplitReadStreamRequest(properties); + }; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.originalStream != null && message.hasOwnProperty("originalStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fraction != null && message.hasOwnProperty("fraction")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + case 2: + message.fraction = reader.float(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.originalStream != null && message.hasOwnProperty("originalStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.originalStream); + if (error) + return "originalStream." + error; + } + if (message.fraction != null && message.hasOwnProperty("fraction")) + if (typeof message.fraction !== "number") + return "fraction: number expected"; + return null; + }; + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + */ + SplitReadStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + if (object.originalStream != null) { + if (typeof object.originalStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.originalStream: object expected"); + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.originalStream); + } + if (object.fraction != null) + message.fraction = Number(object.fraction); + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.originalStream = null; + object.fraction = 0; + } + if (message.originalStream != null && message.hasOwnProperty("originalStream")) + object.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.originalStream, options); + if (message.fraction != null && message.hasOwnProperty("fraction")) + object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + return object; + }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamRequest; + })(); + + v1beta1.SplitReadStreamResponse = (function() { + + /** + * Properties of a SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamResponse + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [primaryStream] SplitReadStreamResponse primaryStream + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [remainderStream] SplitReadStreamResponse remainderStream + */ + + /** + * Constructs a new SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamResponse. 
+ * @implements ISplitReadStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + */ + function SplitReadStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamResponse primaryStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} primaryStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.primaryStream = null; + + /** + * SplitReadStreamResponse remainderStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} remainderStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.remainderStream = null; + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse instance + */ + SplitReadStreamResponse.create = function create(properties) { + return new SplitReadStreamResponse(properties); + }; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + case 2: + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.primaryStream); + if (error) + return "primaryStream." + error; + } + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.remainderStream); + if (error) + return "remainderStream." + error; + } + return null; + }; + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + */ + SplitReadStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + if (object.primaryStream != null) { + if (typeof object.primaryStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primaryStream: object expected"); + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.primaryStream); + } + if (object.remainderStream != null) { + if (typeof object.remainderStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainderStream: object expected"); + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.remainderStream); + } + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.primaryStream = null; + object.remainderStream = null; + } + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + object.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.primaryStream, options); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + object.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.remainderStream, options); + return object; + }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamResponse; + })(); + + v1beta1.TableReference = (function() { + + /** + * Properties of a TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableReference + * @property {string|null} [projectId] TableReference projectId + * @property {string|null} [datasetId] TableReference datasetId + * @property {string|null} [tableId] TableReference tableId + */ + + /** + * Constructs a new TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableReference. 
+ * @implements ITableReference + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + */ + function TableReference(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReference projectId. + * @member {string} projectId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.projectId = ""; + + /** + * TableReference datasetId. + * @member {string} datasetId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.datasetId = ""; + + /** + * TableReference tableId. + * @member {string} tableId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.tableId = ""; + + /** + * Creates a new TableReference instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference instance + */ + TableReference.create = function create(properties) { + return new TableReference(properties); + }; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.projectId != null && message.hasOwnProperty("projectId")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.datasetId); + if (message.tableId != null && message.hasOwnProperty("tableId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); + return writer; + }; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReference message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.projectId = reader.string(); + break; + case 2: + message.datasetId = reader.string(); + break; + case 3: + message.tableId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReference message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReference.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.projectId != null && message.hasOwnProperty("projectId")) + if (!$util.isString(message.projectId)) + return "projectId: string expected"; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + if (!$util.isString(message.datasetId)) + return "datasetId: string expected"; + if (message.tableId != null && message.hasOwnProperty("tableId")) + if (!$util.isString(message.tableId)) + return "tableId: string expected"; + return null; + }; + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + */ + TableReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReference) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + if (object.projectId != null) + message.projectId = String(object.projectId); + if (object.datasetId != null) + message.datasetId = String(object.datasetId); + if (object.tableId != null) + message.tableId = String(object.tableId); + return message; + }; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableReference} message TableReference + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReference.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.projectId = ""; + object.datasetId = ""; + object.tableId = ""; + } + if (message.projectId != null && message.hasOwnProperty("projectId")) + object.projectId = message.projectId; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + object.datasetId = message.datasetId; + if (message.tableId != null && message.hasOwnProperty("tableId")) + object.tableId = message.tableId; + return object; + }; + + /** + * Converts this TableReference to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + * @returns {Object.} JSON object + */ + TableReference.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableReference; + })(); + + v1beta1.TableModifiers = (function() { + + /** + * Properties of a TableModifiers. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ + + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableModifiers. 
+ * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableModifiers snapshotTime. + * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; + + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableModifiers message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableModifiers message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; + + /** + * Converts this TableModifiers to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableModifiers; + })(); + + return v1beta1; + })(); + + return storage; + })(); + + return bigquery; + })(); + + return cloud; + })(); + + google.api = (function() { + + /** + * Namespace api. + * @memberof google + * @namespace + */ + var api = {}; + + api.Http = (function() { + + /** + * Properties of a Http. + * @memberof google.api + * @interface IHttp + * @property {Array.|null} [rules] Http rules + * @property {boolean|null} [fullyDecodeReservedExpansion] Http fullyDecodeReservedExpansion + */ + + /** + * Constructs a new Http. + * @memberof google.api + * @classdesc Represents a Http. + * @implements IHttp + * @constructor + * @param {google.api.IHttp=} [properties] Properties to set + */ + function Http(properties) { + this.rules = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Http rules. + * @member {Array.} rules + * @memberof google.api.Http + * @instance + */ + Http.prototype.rules = $util.emptyArray; + + /** + * Http fullyDecodeReservedExpansion. + * @member {boolean} fullyDecodeReservedExpansion + * @memberof google.api.Http + * @instance + */ + Http.prototype.fullyDecodeReservedExpansion = false; + + /** + * Creates a new Http instance using the specified properties. + * @function create + * @memberof google.api.Http + * @static + * @param {google.api.IHttp=} [properties] Properties to set + * @returns {google.api.Http} Http instance + */ + Http.create = function create(properties) { + return new Http(properties); + }; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. 
+ * @function encode + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.rules != null && message.rules.length) + for (var i = 0; i < message.rules.length; ++i) + $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); + return writer; + }; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Http message from the specified reader or buffer. + * @function decode + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.Http(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.rules && message.rules.length)) + message.rules = []; + message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Http message. + * @function verify + * @memberof google.api.Http + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Http.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.rules != null && message.hasOwnProperty("rules")) { + if (!Array.isArray(message.rules)) + return "rules: array expected"; + for (var i = 0; i < message.rules.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.rules[i]); + if (error) + return "rules." 
+ error; + } + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + if (typeof message.fullyDecodeReservedExpansion !== "boolean") + return "fullyDecodeReservedExpansion: boolean expected"; + return null; + }; + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.Http + * @static + * @param {Object.} object Plain object + * @returns {google.api.Http} Http + */ + Http.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.Http) + return object; + var message = new $root.google.api.Http(); + if (object.rules) { + if (!Array.isArray(object.rules)) + throw TypeError(".google.api.Http.rules: array expected"); + message.rules = []; + for (var i = 0; i < object.rules.length; ++i) { + if (typeof object.rules[i] !== "object") + throw TypeError(".google.api.Http.rules: object expected"); + message.rules[i] = $root.google.api.HttpRule.fromObject(object.rules[i]); + } + } + if (object.fullyDecodeReservedExpansion != null) + message.fullyDecodeReservedExpansion = Boolean(object.fullyDecodeReservedExpansion); + return message; + }; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.Http + * @static + * @param {google.api.Http} message Http + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Http.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.rules = []; + if (options.defaults) + object.fullyDecodeReservedExpansion = false; + if (message.rules && message.rules.length) { + object.rules = []; + for (var j = 0; j < message.rules.length; ++j) + object.rules[j] = $root.google.api.HttpRule.toObject(message.rules[j], options); + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + object.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + return object; + }; + + /** + * Converts this Http to JSON. + * @function toJSON + * @memberof google.api.Http + * @instance + * @returns {Object.} JSON object + */ + Http.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Http; + })(); + + api.HttpRule = (function() { + + /** + * Properties of a HttpRule. + * @memberof google.api + * @interface IHttpRule + * @property {string|null} [selector] HttpRule selector + * @property {string|null} [get] HttpRule get + * @property {string|null} [put] HttpRule put + * @property {string|null} [post] HttpRule post + * @property {string|null} ["delete"] HttpRule delete + * @property {string|null} [patch] HttpRule patch + * @property {google.api.ICustomHttpPattern|null} [custom] HttpRule custom + * @property {string|null} [body] HttpRule body + * @property {string|null} [responseBody] HttpRule responseBody + * @property {Array.|null} [additionalBindings] HttpRule additionalBindings + */ + + /** + * Constructs a new HttpRule. + * @memberof google.api + * @classdesc Represents a HttpRule. 
+ * @implements IHttpRule + * @constructor + * @param {google.api.IHttpRule=} [properties] Properties to set + */ + function HttpRule(properties) { + this.additionalBindings = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * HttpRule selector. + * @member {string} selector + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.selector = ""; + + /** + * HttpRule get. + * @member {string} get + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.get = ""; + + /** + * HttpRule put. + * @member {string} put + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.put = ""; + + /** + * HttpRule post. + * @member {string} post + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.post = ""; + + /** + * HttpRule delete. + * @member {string} delete + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype["delete"] = ""; + + /** + * HttpRule patch. + * @member {string} patch + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.patch = ""; + + /** + * HttpRule custom. + * @member {google.api.ICustomHttpPattern|null|undefined} custom + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.custom = null; + + /** + * HttpRule body. + * @member {string} body + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.body = ""; + + /** + * HttpRule responseBody. + * @member {string} responseBody + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.responseBody = ""; + + /** + * HttpRule additionalBindings. + * @member {Array.} additionalBindings + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.additionalBindings = $util.emptyArray; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * HttpRule pattern. 
+ * @member {"get"|"put"|"post"|"delete"|"patch"|"custom"|undefined} pattern + * @memberof google.api.HttpRule + * @instance + */ + Object.defineProperty(HttpRule.prototype, "pattern", { + get: $util.oneOfGetter($oneOfFields = ["get", "put", "post", "delete", "patch", "custom"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new HttpRule instance using the specified properties. + * @function create + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule=} [properties] Properties to set + * @returns {google.api.HttpRule} HttpRule instance + */ + HttpRule.create = function create(properties) { + return new HttpRule(properties); + }; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @function encode + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HttpRule.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selector != null && message.hasOwnProperty("selector")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); + if (message.get != null && message.hasOwnProperty("get")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); + if (message.put != null && message.hasOwnProperty("put")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); + if (message.post != null && message.hasOwnProperty("post")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); + if (message["delete"] != null && message.hasOwnProperty("delete")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); + if (message.patch != null && message.hasOwnProperty("patch")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); + if (message.body != null && 
message.hasOwnProperty("body")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); + if (message.custom != null && message.hasOwnProperty("custom")) + $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.additionalBindings != null && message.additionalBindings.length) + for (var i = 0; i < message.additionalBindings.length; ++i) + $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); + return writer; + }; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HttpRule.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @function decode + * @memberof google.api.HttpRule + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.HttpRule} HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HttpRule.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message["delete"] = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + if (!(message.additionalBindings && message.additionalBindings.length)) + message.additionalBindings = []; + message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.HttpRule + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.HttpRule} HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HttpRule.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a HttpRule message. 
+ * @function verify + * @memberof google.api.HttpRule + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + HttpRule.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.selector != null && message.hasOwnProperty("selector")) + if (!$util.isString(message.selector)) + return "selector: string expected"; + if (message.get != null && message.hasOwnProperty("get")) { + properties.pattern = 1; + if (!$util.isString(message.get)) + return "get: string expected"; + } + if (message.put != null && message.hasOwnProperty("put")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.put)) + return "put: string expected"; + } + if (message.post != null && message.hasOwnProperty("post")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.post)) + return "post: string expected"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message["delete"])) + return "delete: string expected"; + } + if (message.patch != null && message.hasOwnProperty("patch")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.patch)) + return "patch: string expected"; + } + if (message.custom != null && message.hasOwnProperty("custom")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + { + var error = $root.google.api.CustomHttpPattern.verify(message.custom); + if (error) + return "custom." 
+ error; + } + } + if (message.body != null && message.hasOwnProperty("body")) + if (!$util.isString(message.body)) + return "body: string expected"; + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + if (!$util.isString(message.responseBody)) + return "responseBody: string expected"; + if (message.additionalBindings != null && message.hasOwnProperty("additionalBindings")) { + if (!Array.isArray(message.additionalBindings)) + return "additionalBindings: array expected"; + for (var i = 0; i < message.additionalBindings.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.additionalBindings[i]); + if (error) + return "additionalBindings." + error; + } + } + return null; + }; + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.HttpRule + * @static + * @param {Object.} object Plain object + * @returns {google.api.HttpRule} HttpRule + */ + HttpRule.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.HttpRule) + return object; + var message = new $root.google.api.HttpRule(); + if (object.selector != null) + message.selector = String(object.selector); + if (object.get != null) + message.get = String(object.get); + if (object.put != null) + message.put = String(object.put); + if (object.post != null) + message.post = String(object.post); + if (object["delete"] != null) + message["delete"] = String(object["delete"]); + if (object.patch != null) + message.patch = String(object.patch); + if (object.custom != null) { + if (typeof object.custom !== "object") + throw TypeError(".google.api.HttpRule.custom: object expected"); + message.custom = $root.google.api.CustomHttpPattern.fromObject(object.custom); + } + if (object.body != null) + message.body = String(object.body); + if (object.responseBody != null) + message.responseBody = String(object.responseBody); + if 
(object.additionalBindings) { + if (!Array.isArray(object.additionalBindings)) + throw TypeError(".google.api.HttpRule.additionalBindings: array expected"); + message.additionalBindings = []; + for (var i = 0; i < object.additionalBindings.length; ++i) { + if (typeof object.additionalBindings[i] !== "object") + throw TypeError(".google.api.HttpRule.additionalBindings: object expected"); + message.additionalBindings[i] = $root.google.api.HttpRule.fromObject(object.additionalBindings[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.HttpRule + * @static + * @param {google.api.HttpRule} message HttpRule + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + HttpRule.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.additionalBindings = []; + if (options.defaults) { + object.selector = ""; + object.body = ""; + object.responseBody = ""; + } + if (message.selector != null && message.hasOwnProperty("selector")) + object.selector = message.selector; + if (message.get != null && message.hasOwnProperty("get")) { + object.get = message.get; + if (options.oneofs) + object.pattern = "get"; + } + if (message.put != null && message.hasOwnProperty("put")) { + object.put = message.put; + if (options.oneofs) + object.pattern = "put"; + } + if (message.post != null && message.hasOwnProperty("post")) { + object.post = message.post; + if (options.oneofs) + object.pattern = "post"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + object["delete"] = message["delete"]; + if (options.oneofs) + object.pattern = "delete"; + } + if (message.patch != null && message.hasOwnProperty("patch")) { + object.patch = message.patch; + if (options.oneofs) + object.pattern = 
"patch"; + } + if (message.body != null && message.hasOwnProperty("body")) + object.body = message.body; + if (message.custom != null && message.hasOwnProperty("custom")) { + object.custom = $root.google.api.CustomHttpPattern.toObject(message.custom, options); + if (options.oneofs) + object.pattern = "custom"; + } + if (message.additionalBindings && message.additionalBindings.length) { + object.additionalBindings = []; + for (var j = 0; j < message.additionalBindings.length; ++j) + object.additionalBindings[j] = $root.google.api.HttpRule.toObject(message.additionalBindings[j], options); + } + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + object.responseBody = message.responseBody; + return object; + }; + + /** + * Converts this HttpRule to JSON. + * @function toJSON + * @memberof google.api.HttpRule + * @instance + * @returns {Object.} JSON object + */ + HttpRule.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return HttpRule; + })(); + + api.CustomHttpPattern = (function() { + + /** + * Properties of a CustomHttpPattern. + * @memberof google.api + * @interface ICustomHttpPattern + * @property {string|null} [kind] CustomHttpPattern kind + * @property {string|null} [path] CustomHttpPattern path + */ + + /** + * Constructs a new CustomHttpPattern. + * @memberof google.api + * @classdesc Represents a CustomHttpPattern. + * @implements ICustomHttpPattern + * @constructor + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + */ + function CustomHttpPattern(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CustomHttpPattern kind. + * @member {string} kind + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.kind = ""; + + /** + * CustomHttpPattern path. 
+ * @member {string} path + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.path = ""; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @function create + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + * @returns {google.api.CustomHttpPattern} CustomHttpPattern instance + */ + CustomHttpPattern.create = function create(properties) { + return new CustomHttpPattern(properties); + }; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @function encode + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CustomHttpPattern.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.kind != null && message.hasOwnProperty("kind")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); + if (message.path != null && message.hasOwnProperty("path")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); + return writer; + }; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CustomHttpPattern.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @function decode + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CustomHttpPattern message. + * @function verify + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CustomHttpPattern.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.kind != null && message.hasOwnProperty("kind")) + if (!$util.isString(message.kind)) + return "kind: string expected"; + if (message.path != null && message.hasOwnProperty("path")) + if (!$util.isString(message.path)) + return "path: string expected"; + return null; + }; + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} object Plain object + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + */ + CustomHttpPattern.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.CustomHttpPattern) + return object; + var message = new $root.google.api.CustomHttpPattern(); + if (object.kind != null) + message.kind = String(object.kind); + if (object.path != null) + message.path = String(object.path); + return message; + }; + + /** + * Creates a plain object from a CustomHttpPattern message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.CustomHttpPattern} message CustomHttpPattern + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CustomHttpPattern.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.kind = ""; + object.path = ""; + } + if (message.kind != null && message.hasOwnProperty("kind")) + object.kind = message.kind; + if (message.path != null && message.hasOwnProperty("path")) + object.path = message.path; + return object; + }; + + /** + * Converts this CustomHttpPattern to JSON. + * @function toJSON + * @memberof google.api.CustomHttpPattern + * @instance + * @returns {Object.} JSON object + */ + CustomHttpPattern.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return CustomHttpPattern; + })(); + + /** + * FieldBehavior enum. + * @name google.api.FieldBehavior + * @enum {string} + * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value + * @property {number} OPTIONAL=1 OPTIONAL value + * @property {number} REQUIRED=2 REQUIRED value + * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value + * @property {number} INPUT_ONLY=4 INPUT_ONLY value + * @property {number} IMMUTABLE=5 IMMUTABLE value + */ + api.FieldBehavior = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "FIELD_BEHAVIOR_UNSPECIFIED"] = 0; + values[valuesById[1] = "OPTIONAL"] = 1; + values[valuesById[2] = "REQUIRED"] = 2; + values[valuesById[3] = "OUTPUT_ONLY"] = 3; + values[valuesById[4] = "INPUT_ONLY"] = 4; + values[valuesById[5] = "IMMUTABLE"] = 5; + return values; + })(); + + api.ResourceDescriptor = (function() { + + /** + * Properties of a ResourceDescriptor. 
+ * @memberof google.api + * @interface IResourceDescriptor + * @property {string|null} [type] ResourceDescriptor type + * @property {Array.|null} [pattern] ResourceDescriptor pattern + * @property {string|null} [nameField] ResourceDescriptor nameField + * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history + * @property {string|null} [plural] ResourceDescriptor plural + * @property {string|null} [singular] ResourceDescriptor singular + */ + + /** + * Constructs a new ResourceDescriptor. + * @memberof google.api + * @classdesc Represents a ResourceDescriptor. + * @implements IResourceDescriptor + * @constructor + * @param {google.api.IResourceDescriptor=} [properties] Properties to set + */ + function ResourceDescriptor(properties) { + this.pattern = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ResourceDescriptor type. + * @member {string} type + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.type = ""; + + /** + * ResourceDescriptor pattern. + * @member {Array.} pattern + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.pattern = $util.emptyArray; + + /** + * ResourceDescriptor nameField. + * @member {string} nameField + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.nameField = ""; + + /** + * ResourceDescriptor history. + * @member {google.api.ResourceDescriptor.History} history + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.history = 0; + + /** + * ResourceDescriptor plural. + * @member {string} plural + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.plural = ""; + + /** + * ResourceDescriptor singular. 
+ * @member {string} singular + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.singular = ""; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. + * @function create + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor=} [properties] Properties to set + * @returns {google.api.ResourceDescriptor} ResourceDescriptor instance + */ + ResourceDescriptor.create = function create(properties) { + return new ResourceDescriptor(properties); + }; + + /** + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @function encode + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceDescriptor.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type != null && message.hasOwnProperty("type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.pattern != null && message.pattern.length) + for (var i = 0; i < message.pattern.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); + if (message.nameField != null && message.hasOwnProperty("nameField")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); + if (message.history != null && message.hasOwnProperty("history")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); + if (message.plural != null && message.hasOwnProperty("plural")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); + if (message.singular != null && message.hasOwnProperty("singular")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); + return writer; + }; + + /** + * 
Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceDescriptor.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer. + * @function decode + * @memberof google.api.ResourceDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceDescriptor.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + if (!(message.pattern && message.pattern.length)) + message.pattern = []; + message.pattern.push(reader.string()); + break; + case 3: + message.nameField = reader.string(); + break; + case 4: + message.history = reader.int32(); + break; + case 5: + message.plural = reader.string(); + break; + case 6: + message.singular = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.ResourceDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceDescriptor.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ResourceDescriptor message. 
+ * @function verify + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ResourceDescriptor.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.pattern != null && message.hasOwnProperty("pattern")) { + if (!Array.isArray(message.pattern)) + return "pattern: array expected"; + for (var i = 0; i < message.pattern.length; ++i) + if (!$util.isString(message.pattern[i])) + return "pattern: string[] expected"; + } + if (message.nameField != null && message.hasOwnProperty("nameField")) + if (!$util.isString(message.nameField)) + return "nameField: string expected"; + if (message.history != null && message.hasOwnProperty("history")) + switch (message.history) { + default: + return "history: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.plural != null && message.hasOwnProperty("plural")) + if (!$util.isString(message.plural)) + return "plural: string expected"; + if (message.singular != null && message.hasOwnProperty("singular")) + if (!$util.isString(message.singular)) + return "singular: string expected"; + return null; + }; + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} object Plain object + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + */ + ResourceDescriptor.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceDescriptor) + return object; + var message = new $root.google.api.ResourceDescriptor(); + if (object.type != null) + message.type = String(object.type); + if (object.pattern) { + if (!Array.isArray(object.pattern)) + throw TypeError(".google.api.ResourceDescriptor.pattern: array expected"); + message.pattern = []; + for (var i = 0; i < object.pattern.length; ++i) + message.pattern[i] = String(object.pattern[i]); + } + if (object.nameField != null) + message.nameField = String(object.nameField); + switch (object.history) { + case "HISTORY_UNSPECIFIED": + case 0: + message.history = 0; + break; + case "ORIGINALLY_SINGLE_PATTERN": + case 1: + message.history = 1; + break; + case "FUTURE_MULTI_PATTERN": + case 2: + message.history = 2; + break; + } + if (object.plural != null) + message.plural = String(object.plural); + if (object.singular != null) + message.singular = String(object.singular); + return message; + }; + + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.ResourceDescriptor} message ResourceDescriptor + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ResourceDescriptor.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.pattern = []; + if (options.defaults) { + object.type = ""; + object.nameField = ""; + object.history = options.enums === String ? 
"HISTORY_UNSPECIFIED" : 0; + object.plural = ""; + object.singular = ""; + } + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.pattern && message.pattern.length) { + object.pattern = []; + for (var j = 0; j < message.pattern.length; ++j) + object.pattern[j] = message.pattern[j]; + } + if (message.nameField != null && message.hasOwnProperty("nameField")) + object.nameField = message.nameField; + if (message.history != null && message.hasOwnProperty("history")) + object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] : message.history; + if (message.plural != null && message.hasOwnProperty("plural")) + object.plural = message.plural; + if (message.singular != null && message.hasOwnProperty("singular")) + object.singular = message.singular; + return object; + }; + + /** + * Converts this ResourceDescriptor to JSON. + * @function toJSON + * @memberof google.api.ResourceDescriptor + * @instance + * @returns {Object.} JSON object + */ + ResourceDescriptor.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * History enum. + * @name google.api.ResourceDescriptor.History + * @enum {string} + * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value + * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value + * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value + */ + ResourceDescriptor.History = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "HISTORY_UNSPECIFIED"] = 0; + values[valuesById[1] = "ORIGINALLY_SINGLE_PATTERN"] = 1; + values[valuesById[2] = "FUTURE_MULTI_PATTERN"] = 2; + return values; + })(); + + return ResourceDescriptor; + })(); + + api.ResourceReference = (function() { + + /** + * Properties of a ResourceReference. 
+ * @memberof google.api + * @interface IResourceReference + * @property {string|null} [type] ResourceReference type + * @property {string|null} [childType] ResourceReference childType + */ + + /** + * Constructs a new ResourceReference. + * @memberof google.api + * @classdesc Represents a ResourceReference. + * @implements IResourceReference + * @constructor + * @param {google.api.IResourceReference=} [properties] Properties to set + */ + function ResourceReference(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ResourceReference type. + * @member {string} type + * @memberof google.api.ResourceReference + * @instance + */ + ResourceReference.prototype.type = ""; + + /** + * ResourceReference childType. + * @member {string} childType + * @memberof google.api.ResourceReference + * @instance + */ + ResourceReference.prototype.childType = ""; + + /** + * Creates a new ResourceReference instance using the specified properties. + * @function create + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference=} [properties] Properties to set + * @returns {google.api.ResourceReference} ResourceReference instance + */ + ResourceReference.create = function create(properties) { + return new ResourceReference(properties); + }; + + /** + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. 
+ * @function encode + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceReference.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type != null && message.hasOwnProperty("type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.childType != null && message.hasOwnProperty("childType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); + return writer; + }; + + /** + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceReference.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ResourceReference message from the specified reader or buffer. + * @function decode + * @memberof google.api.ResourceReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.ResourceReference} ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceReference.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.childType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.ResourceReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ResourceReference} ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceReference.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ResourceReference message. + * @function verify + * @memberof google.api.ResourceReference + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ResourceReference.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.childType != null && message.hasOwnProperty("childType")) + if (!$util.isString(message.childType)) + return "childType: string expected"; + return null; + }; + + /** + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ResourceReference + * @static + * @param {Object.} object Plain object + * @returns {google.api.ResourceReference} ResourceReference + */ + ResourceReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceReference) + return object; + var message = new $root.google.api.ResourceReference(); + if (object.type != null) + message.type = String(object.type); + if (object.childType != null) + message.childType = String(object.childType); + return message; + }; + + /** + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.ResourceReference + * @static + * @param {google.api.ResourceReference} message ResourceReference + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ResourceReference.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.type = ""; + object.childType = ""; + } + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.childType != null && message.hasOwnProperty("childType")) + object.childType = message.childType; + return object; + }; + + /** + * Converts this ResourceReference to JSON. + * @function toJSON + * @memberof google.api.ResourceReference + * @instance + * @returns {Object.} JSON object + */ + ResourceReference.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ResourceReference; + })(); + + return api; + })(); + + google.protobuf = (function() { + + /** + * Namespace protobuf. + * @memberof google + * @namespace + */ + var protobuf = {}; + + protobuf.FileDescriptorSet = (function() { + + /** + * Properties of a FileDescriptorSet. 
+ * @memberof google.protobuf + * @interface IFileDescriptorSet + * @property {Array.|null} [file] FileDescriptorSet file + */ + + /** + * Constructs a new FileDescriptorSet. + * @memberof google.protobuf + * @classdesc Represents a FileDescriptorSet. + * @implements IFileDescriptorSet + * @constructor + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + */ + function FileDescriptorSet(properties) { + this.file = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorSet file. + * @member {Array.} file + * @memberof google.protobuf.FileDescriptorSet + * @instance + */ + FileDescriptorSet.prototype.file = $util.emptyArray; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet instance + */ + FileDescriptorSet.create = function create(properties) { + return new FileDescriptorSet(properties); + }; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.file != null && message.file.length) + for (var i = 0; i < message.file.length; ++i) + $root.google.protobuf.FileDescriptorProto.encode(message.file[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.file && message.file.length)) + message.file = []; + message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorSet message. + * @function verify + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorSet.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.file != null && message.hasOwnProperty("file")) { + if (!Array.isArray(message.file)) + return "file: array expected"; + for (var i = 0; i < message.file.length; ++i) { + var error = $root.google.protobuf.FileDescriptorProto.verify(message.file[i]); + if (error) + return "file." + error; + } + } + return null; + }; + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + */ + FileDescriptorSet.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorSet) + return object; + var message = new $root.google.protobuf.FileDescriptorSet(); + if (object.file) { + if (!Array.isArray(object.file)) + throw TypeError(".google.protobuf.FileDescriptorSet.file: array expected"); + message.file = []; + for (var i = 0; i < object.file.length; ++i) { + if (typeof object.file[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorSet.file: object expected"); + message.file[i] = $root.google.protobuf.FileDescriptorProto.fromObject(object.file[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.FileDescriptorSet} message FileDescriptorSet + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorSet.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.file = []; + if (message.file && message.file.length) { + object.file = []; + for (var j = 0; j < message.file.length; ++j) + object.file[j] = $root.google.protobuf.FileDescriptorProto.toObject(message.file[j], options); + } + return object; + }; + + /** + * Converts this FileDescriptorSet to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FileDescriptorSet + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorSet.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FileDescriptorSet; + })(); + + protobuf.FileDescriptorProto = (function() { + + /** + * Properties of a FileDescriptorProto. + * @memberof google.protobuf + * @interface IFileDescriptorProto + * @property {string|null} [name] FileDescriptorProto name + * @property {string|null} ["package"] FileDescriptorProto package + * @property {Array.|null} [dependency] FileDescriptorProto dependency + * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency + * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [messageType] FileDescriptorProto messageType + * @property {Array.|null} [enumType] FileDescriptorProto enumType + * @property {Array.|null} [service] FileDescriptorProto service + * @property {Array.|null} [extension] FileDescriptorProto extension + * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options + * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo + * @property {string|null} [syntax] FileDescriptorProto syntax + */ + + /** + * Constructs a new FileDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a FileDescriptorProto. 
+ * @implements IFileDescriptorProto + * @constructor + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + */ + function FileDescriptorProto(properties) { + this.dependency = []; + this.publicDependency = []; + this.weakDependency = []; + this.messageType = []; + this.enumType = []; + this.service = []; + this.extension = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.name = ""; + + /** + * FileDescriptorProto package. + * @member {string} package + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype["package"] = ""; + + /** + * FileDescriptorProto dependency. + * @member {Array.} dependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.dependency = $util.emptyArray; + + /** + * FileDescriptorProto publicDependency. + * @member {Array.} publicDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.publicDependency = $util.emptyArray; + + /** + * FileDescriptorProto weakDependency. + * @member {Array.} weakDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + + /** + * FileDescriptorProto messageType. + * @member {Array.} messageType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.messageType = $util.emptyArray; + + /** + * FileDescriptorProto enumType. + * @member {Array.} enumType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * FileDescriptorProto service. 
+ * @member {Array.} service + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.service = $util.emptyArray; + + /** + * FileDescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.extension = $util.emptyArray; + + /** + * FileDescriptorProto options. + * @member {google.protobuf.IFileOptions|null|undefined} options + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.options = null; + + /** + * FileDescriptorProto sourceCodeInfo. + * @member {google.protobuf.ISourceCodeInfo|null|undefined} sourceCodeInfo + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.sourceCodeInfo = null; + + /** + * FileDescriptorProto syntax. + * @member {string} syntax + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.syntax = ""; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto instance + */ + FileDescriptorProto.create = function create(properties) { + return new FileDescriptorProto(properties); + }; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message["package"] != null && message.hasOwnProperty("package")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); + if (message.dependency != null && message.dependency.length) + for (var i = 0; i < message.dependency.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.dependency[i]); + if (message.messageType != null && message.messageType.length) + for (var i = 0; i < message.messageType.length; ++i) + $root.google.protobuf.DescriptorProto.encode(message.messageType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.service != null && message.service.length) + for (var i = 0; i < message.service.length; ++i) + $root.google.protobuf.ServiceDescriptorProto.encode(message.service[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, 
wireType 2 =*/66).fork()).ldelim(); + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.publicDependency != null && message.publicDependency.length) + for (var i = 0; i < message.publicDependency.length; ++i) + writer.uint32(/* id 10, wireType 0 =*/80).int32(message.publicDependency[i]); + if (message.weakDependency != null && message.weakDependency.length) + for (var i = 0; i < message.weakDependency.length; ++i) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); + if (message.syntax != null && message.hasOwnProperty("syntax")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); + return writer; + }; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message["package"] = reader.string(); + break; + case 3: + if (!(message.dependency && message.dependency.length)) + message.dependency = []; + message.dependency.push(reader.string()); + break; + case 10: + if (!(message.publicDependency && message.publicDependency.length)) + message.publicDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.publicDependency.push(reader.int32()); + } else + message.publicDependency.push(reader.int32()); + break; + case 11: + if (!(message.weakDependency && message.weakDependency.length)) + message.weakDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.weakDependency.push(reader.int32()); + } else + message.weakDependency.push(reader.int32()); + break; + case 4: + if (!(message.messageType && message.messageType.length)) + message.messageType = []; + message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + 
message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + if (!(message.service && message.service.length)) + message.service = []; + message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message["package"] != null && message.hasOwnProperty("package")) + if (!$util.isString(message["package"])) + return "package: string expected"; + if (message.dependency != null && message.hasOwnProperty("dependency")) { + if (!Array.isArray(message.dependency)) + return "dependency: array expected"; + for (var i = 0; i < message.dependency.length; ++i) + if (!$util.isString(message.dependency[i])) + return "dependency: string[] expected"; + } + if (message.publicDependency != null && message.hasOwnProperty("publicDependency")) { + if (!Array.isArray(message.publicDependency)) + return "publicDependency: array expected"; + for (var i = 0; i < message.publicDependency.length; ++i) + if (!$util.isInteger(message.publicDependency[i])) + return "publicDependency: integer[] expected"; + } + if (message.weakDependency != null && message.hasOwnProperty("weakDependency")) { + if (!Array.isArray(message.weakDependency)) + return "weakDependency: array expected"; + for (var i = 0; i < message.weakDependency.length; ++i) + if (!$util.isInteger(message.weakDependency[i])) + return "weakDependency: integer[] expected"; + } + if (message.messageType != null && message.hasOwnProperty("messageType")) { + if (!Array.isArray(message.messageType)) + return "messageType: array expected"; + for (var i = 0; i < message.messageType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.messageType[i]); + if (error) + return "messageType." 
+ error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." + error; + } + } + if (message.service != null && message.hasOwnProperty("service")) { + if (!Array.isArray(message.service)) + return "service: array expected"; + for (var i = 0; i < message.service.length; ++i) { + var error = $root.google.protobuf.ServiceDescriptorProto.verify(message.service[i]); + if (error) + return "service." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.FileOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) { + var error = $root.google.protobuf.SourceCodeInfo.verify(message.sourceCodeInfo); + if (error) + return "sourceCodeInfo." + error; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + if (!$util.isString(message.syntax)) + return "syntax: string expected"; + return null; + }; + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + */ + FileDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorProto) + return object; + var message = new $root.google.protobuf.FileDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object["package"] != null) + message["package"] = String(object["package"]); + if (object.dependency) { + if (!Array.isArray(object.dependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.dependency: array expected"); + message.dependency = []; + for (var i = 0; i < object.dependency.length; ++i) + message.dependency[i] = String(object.dependency[i]); + } + if (object.publicDependency) { + if (!Array.isArray(object.publicDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.publicDependency: array expected"); + message.publicDependency = []; + for (var i = 0; i < object.publicDependency.length; ++i) + message.publicDependency[i] = object.publicDependency[i] | 0; + } + if (object.weakDependency) { + if (!Array.isArray(object.weakDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.weakDependency: array expected"); + message.weakDependency = []; + for (var i = 0; i < object.weakDependency.length; ++i) + message.weakDependency[i] = object.weakDependency[i] | 0; + } + if (object.messageType) { + if (!Array.isArray(object.messageType)) + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); + message.messageType = []; + for (var i = 0; i < object.messageType.length; ++i) { + if (typeof object.messageType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: object expected"); + message.messageType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.messageType[i]); + } + } + 
if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: array expected"); + message.enumType = []; + for (var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.service) { + if (!Array.isArray(object.service)) + throw TypeError(".google.protobuf.FileDescriptorProto.service: array expected"); + message.service = []; + for (var i = 0; i < object.service.length; ++i) { + if (typeof object.service[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.service: object expected"); + message.service[i] = $root.google.protobuf.ServiceDescriptorProto.fromObject(object.service[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.FileDescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof object.extension[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.extension: object expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FileOptions.fromObject(object.options); + } + if (object.sourceCodeInfo != null) { + if (typeof object.sourceCodeInfo !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.sourceCodeInfo: object expected"); + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.fromObject(object.sourceCodeInfo); + } + if (object.syntax != null) + message.syntax = String(object.syntax); + return 
message; + }; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.FileDescriptorProto} message FileDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.dependency = []; + object.messageType = []; + object.enumType = []; + object.service = []; + object.extension = []; + object.publicDependency = []; + object.weakDependency = []; + } + if (options.defaults) { + object.name = ""; + object["package"] = ""; + object.options = null; + object.sourceCodeInfo = null; + object.syntax = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message["package"] != null && message.hasOwnProperty("package")) + object["package"] = message["package"]; + if (message.dependency && message.dependency.length) { + object.dependency = []; + for (var j = 0; j < message.dependency.length; ++j) + object.dependency[j] = message.dependency[j]; + } + if (message.messageType && message.messageType.length) { + object.messageType = []; + for (var j = 0; j < message.messageType.length; ++j) + object.messageType[j] = $root.google.protobuf.DescriptorProto.toObject(message.messageType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.service && message.service.length) { + object.service = []; + for (var j = 0; j < message.service.length; ++j) + object.service[j] = 
$root.google.protobuf.ServiceDescriptorProto.toObject(message.service[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FileOptions.toObject(message.options, options); + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + object.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.toObject(message.sourceCodeInfo, options); + if (message.publicDependency && message.publicDependency.length) { + object.publicDependency = []; + for (var j = 0; j < message.publicDependency.length; ++j) + object.publicDependency[j] = message.publicDependency[j]; + } + if (message.weakDependency && message.weakDependency.length) { + object.weakDependency = []; + for (var j = 0; j < message.weakDependency.length; ++j) + object.weakDependency[j] = message.weakDependency[j]; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + object.syntax = message.syntax; + return object; + }; + + /** + * Converts this FileDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.FileDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FileDescriptorProto; + })(); + + protobuf.DescriptorProto = (function() { + + /** + * Properties of a DescriptorProto. 
+ * @memberof google.protobuf + * @interface IDescriptorProto + * @property {string|null} [name] DescriptorProto name + * @property {Array.|null} [field] DescriptorProto field + * @property {Array.|null} [extension] DescriptorProto extension + * @property {Array.|null} [nestedType] DescriptorProto nestedType + * @property {Array.|null} [enumType] DescriptorProto enumType + * @property {Array.|null} [extensionRange] DescriptorProto extensionRange + * @property {Array.|null} [oneofDecl] DescriptorProto oneofDecl + * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options + * @property {Array.|null} [reservedRange] DescriptorProto reservedRange + * @property {Array.|null} [reservedName] DescriptorProto reservedName + */ + + /** + * Constructs a new DescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a DescriptorProto. + * @implements IDescriptorProto + * @constructor + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + */ + function DescriptorProto(properties) { + this.field = []; + this.extension = []; + this.nestedType = []; + this.enumType = []; + this.extensionRange = []; + this.oneofDecl = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DescriptorProto name. + * @member {string} name + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.name = ""; + + /** + * DescriptorProto field. + * @member {Array.} field + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.field = $util.emptyArray; + + /** + * DescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extension = $util.emptyArray; + + /** + * DescriptorProto nestedType. 
+ * @member {Array.} nestedType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.nestedType = $util.emptyArray; + + /** + * DescriptorProto enumType. + * @member {Array.} enumType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * DescriptorProto extensionRange. + * @member {Array.} extensionRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extensionRange = $util.emptyArray; + + /** + * DescriptorProto oneofDecl. + * @member {Array.} oneofDecl + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.oneofDecl = $util.emptyArray; + + /** + * DescriptorProto options. + * @member {google.protobuf.IMessageOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.options = null; + + /** + * DescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedRange = $util.emptyArray; + + /** + * DescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedName = $util.emptyArray; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto} DescriptorProto instance + */ + DescriptorProto.create = function create(properties) { + return new DescriptorProto(properties); + }; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.field != null && message.field.length) + for (var i = 0; i < message.field.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.field[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.nestedType != null && message.nestedType.length) + for (var i = 0; i < message.nestedType.length; ++i) + $root.google.protobuf.DescriptorProto.encode(message.nestedType[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.extensionRange != null && message.extensionRange.length) + for (var i = 0; i < message.extensionRange.length; ++i) + $root.google.protobuf.DescriptorProto.ExtensionRange.encode(message.extensionRange[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.oneofDecl != null && 
message.oneofDecl.length) + for (var i = 0; i < message.oneofDecl.length; ++i) + $root.google.protobuf.OneofDescriptorProto.encode(message.oneofDecl[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.DescriptorProto.ReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + return writer; + }; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + if (!(message.field && message.field.length)) + message.field = []; + message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + if (!(message.nestedType && message.nestedType.length)) + message.nestedType = []; + message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.extensionRange && message.extensionRange.length)) + message.extensionRange = []; + message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + if (!(message.oneofDecl && message.oneofDecl.length)) + message.oneofDecl = []; + message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } 
+ return message; + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DescriptorProto message. + * @function verify + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.field != null && message.hasOwnProperty("field")) { + if (!Array.isArray(message.field)) + return "field: array expected"; + for (var i = 0; i < message.field.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.field[i]); + if (error) + return "field." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." 
+ error; + } + } + if (message.nestedType != null && message.hasOwnProperty("nestedType")) { + if (!Array.isArray(message.nestedType)) + return "nestedType: array expected"; + for (var i = 0; i < message.nestedType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.nestedType[i]); + if (error) + return "nestedType." + error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." + error; + } + } + if (message.extensionRange != null && message.hasOwnProperty("extensionRange")) { + if (!Array.isArray(message.extensionRange)) + return "extensionRange: array expected"; + for (var i = 0; i < message.extensionRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ExtensionRange.verify(message.extensionRange[i]); + if (error) + return "extensionRange." + error; + } + } + if (message.oneofDecl != null && message.hasOwnProperty("oneofDecl")) { + if (!Array.isArray(message.oneofDecl)) + return "oneofDecl: array expected"; + for (var i = 0; i < message.oneofDecl.length; ++i) { + var error = $root.google.protobuf.OneofDescriptorProto.verify(message.oneofDecl[i]); + if (error) + return "oneofDecl." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MessageOptions.verify(message.options); + if (error) + return "options." 
+ error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto} DescriptorProto + */ + DescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto) + return object; + var message = new $root.google.protobuf.DescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.field) { + if (!Array.isArray(object.field)) + throw TypeError(".google.protobuf.DescriptorProto.field: array expected"); + message.field = []; + for (var i = 0; i < object.field.length; ++i) { + if (typeof object.field[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.field: object expected"); + message.field[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.field[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.DescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof 
object.extension[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extension: object expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.nestedType) { + if (!Array.isArray(object.nestedType)) + throw TypeError(".google.protobuf.DescriptorProto.nestedType: array expected"); + message.nestedType = []; + for (var i = 0; i < object.nestedType.length; ++i) { + if (typeof object.nestedType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.nestedType: object expected"); + message.nestedType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.nestedType[i]); + } + } + if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.DescriptorProto.enumType: array expected"); + message.enumType = []; + for (var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.extensionRange) { + if (!Array.isArray(object.extensionRange)) + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: array expected"); + message.extensionRange = []; + for (var i = 0; i < object.extensionRange.length; ++i) { + if (typeof object.extensionRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: object expected"); + message.extensionRange[i] = $root.google.protobuf.DescriptorProto.ExtensionRange.fromObject(object.extensionRange[i]); + } + } + if (object.oneofDecl) { + if (!Array.isArray(object.oneofDecl)) + throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: array expected"); + message.oneofDecl = []; + for (var i = 0; i < object.oneofDecl.length; ++i) { + if (typeof object.oneofDecl[i] !== "object") + throw 
TypeError(".google.protobuf.DescriptorProto.oneofDecl: object expected"); + message.oneofDecl[i] = $root.google.protobuf.OneofDescriptorProto.fromObject(object.oneofDecl[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MessageOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = $root.google.protobuf.DescriptorProto.ReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.DescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + } + return message; + }; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.DescriptorProto} message DescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.field = []; + object.nestedType = []; + object.enumType = []; + object.extensionRange = []; + object.extension = []; + object.oneofDecl = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.field && message.field.length) { + object.field = []; + for (var j = 0; j < message.field.length; ++j) + object.field[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.field[j], options); + } + if (message.nestedType && message.nestedType.length) { + object.nestedType = []; + for (var j = 0; j < message.nestedType.length; ++j) + object.nestedType[j] = $root.google.protobuf.DescriptorProto.toObject(message.nestedType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.extensionRange && message.extensionRange.length) { + object.extensionRange = []; + for (var j = 0; j < message.extensionRange.length; ++j) + object.extensionRange[j] = $root.google.protobuf.DescriptorProto.ExtensionRange.toObject(message.extensionRange[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = 
$root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MessageOptions.toObject(message.options, options); + if (message.oneofDecl && message.oneofDecl.length) { + object.oneofDecl = []; + for (var j = 0; j < message.oneofDecl.length; ++j) + object.oneofDecl[j] = $root.google.protobuf.OneofDescriptorProto.toObject(message.oneofDecl[j], options); + } + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.DescriptorProto.ReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; + }; + + /** + * Converts this DescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto + * @instance + * @returns {Object.} JSON object + */ + DescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + DescriptorProto.ExtensionRange = (function() { + + /** + * Properties of an ExtensionRange. + * @memberof google.protobuf.DescriptorProto + * @interface IExtensionRange + * @property {number|null} [start] ExtensionRange start + * @property {number|null} [end] ExtensionRange end + * @property {google.protobuf.IExtensionRangeOptions|null} [options] ExtensionRange options + */ + + /** + * Constructs a new ExtensionRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents an ExtensionRange. 
+ * @implements IExtensionRange + * @constructor + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + */ + function ExtensionRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.start = 0; + + /** + * ExtensionRange end. + * @member {number} end + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.end = 0; + + /** + * ExtensionRange options. + * @member {google.protobuf.IExtensionRangeOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.options = null; + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange instance + */ + ExtensionRange.create = function create(properties) { + return new ExtensionRange(properties); + }; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && message.hasOwnProperty("start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && message.hasOwnProperty("end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRange message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ExtensionRangeOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + */ + ExtensionRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ExtensionRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected"); + message.options = $root.google.protobuf.ExtensionRangeOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.ExtensionRange} message ExtensionRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + object.options = null; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ExtensionRangeOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this ExtensionRange to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + * @returns {Object.} JSON object + */ + ExtensionRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ExtensionRange; + })(); + + DescriptorProto.ReservedRange = (function() { + + /** + * Properties of a ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @interface IReservedRange + * @property {number|null} [start] ReservedRange start + * @property {number|null} [end] ReservedRange end + */ + + /** + * Constructs a new ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents a ReservedRange. 
+ * @implements IReservedRange + * @constructor + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + */ + function ReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReservedRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.start = 0; + + /** + * ReservedRange end. + * @member {number} end + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.end = 0; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange instance + */ + ReservedRange.create = function create(properties) { + return new ReservedRange(properties); + }; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && message.hasOwnProperty("start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && message.hasOwnProperty("end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReservedRange message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + */ + ReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ReservedRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.ReservedRange} message ReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this ReservedRange to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + * @returns {Object.} JSON object + */ + ReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReservedRange; + })(); + + return DescriptorProto; + })(); + + protobuf.ExtensionRangeOptions = (function() { + + /** + * Properties of an ExtensionRangeOptions. + * @memberof google.protobuf + * @interface IExtensionRangeOptions + * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption + */ + + /** + * Constructs a new ExtensionRangeOptions. + * @memberof google.protobuf + * @classdesc Represents an ExtensionRangeOptions. + * @implements IExtensionRangeOptions + * @constructor + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + */ + function ExtensionRangeOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRangeOptions uninterpretedOption. 
+ * @member {Array.} uninterpretedOption + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions instance + */ + ExtensionRangeOptions.create = function create(properties) { + return new ExtensionRangeOptions(properties); + }; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRangeOptions message. + * @function verify + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRangeOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + */ + ExtensionRangeOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ExtensionRangeOptions) + return object; + var message = new $root.google.protobuf.ExtensionRangeOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.ExtensionRangeOptions} message ExtensionRangeOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRangeOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @function toJSON + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + * @returns {Object.} JSON object + */ + ExtensionRangeOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ExtensionRangeOptions; + })(); + + protobuf.FieldDescriptorProto = (function() { + + /** + * Properties of a FieldDescriptorProto. 
+ * @memberof google.protobuf + * @interface IFieldDescriptorProto + * @property {string|null} [name] FieldDescriptorProto name + * @property {number|null} [number] FieldDescriptorProto number + * @property {google.protobuf.FieldDescriptorProto.Label|null} [label] FieldDescriptorProto label + * @property {google.protobuf.FieldDescriptorProto.Type|null} [type] FieldDescriptorProto type + * @property {string|null} [typeName] FieldDescriptorProto typeName + * @property {string|null} [extendee] FieldDescriptorProto extendee + * @property {string|null} [defaultValue] FieldDescriptorProto defaultValue + * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex + * @property {string|null} [jsonName] FieldDescriptorProto jsonName + * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options + */ + + /** + * Constructs a new FieldDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a FieldDescriptorProto. + * @implements IFieldDescriptorProto + * @constructor + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + */ + function FieldDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.name = ""; + + /** + * FieldDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.number = 0; + + /** + * FieldDescriptorProto label. + * @member {google.protobuf.FieldDescriptorProto.Label} label + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.label = 1; + + /** + * FieldDescriptorProto type. 
+ * @member {google.protobuf.FieldDescriptorProto.Type} type + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.type = 1; + + /** + * FieldDescriptorProto typeName. + * @member {string} typeName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.typeName = ""; + + /** + * FieldDescriptorProto extendee. + * @member {string} extendee + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.extendee = ""; + + /** + * FieldDescriptorProto defaultValue. + * @member {string} defaultValue + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.defaultValue = ""; + + /** + * FieldDescriptorProto oneofIndex. + * @member {number} oneofIndex + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.oneofIndex = 0; + + /** + * FieldDescriptorProto jsonName. + * @member {string} jsonName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.jsonName = ""; + + /** + * FieldDescriptorProto options. + * @member {google.protobuf.IFieldOptions|null|undefined} options + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.options = null; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto instance + */ + FieldDescriptorProto.create = function create(properties) { + return new FieldDescriptorProto(properties); + }; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.extendee != null && message.hasOwnProperty("extendee")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); + if (message.number != null && message.hasOwnProperty("number")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); + if (message.label != null && message.hasOwnProperty("label")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); + if (message.type != null && message.hasOwnProperty("type")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); + if (message.typeName != null && message.hasOwnProperty("typeName")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); + return writer; + }; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32(); + break; + case 5: + message.type = reader.int32(); + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.label != null && message.hasOwnProperty("label")) + switch (message.label) { + default: + return "label: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + case 17: + case 18: + break; + } + if (message.typeName != null && message.hasOwnProperty("typeName")) + if (!$util.isString(message.typeName)) + return "typeName: string expected"; + if (message.extendee != null && message.hasOwnProperty("extendee")) + if (!$util.isString(message.extendee)) + return "extendee: string expected"; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + if (!$util.isString(message.defaultValue)) + return "defaultValue: string expected"; + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + if (!$util.isInteger(message.oneofIndex)) + return "oneofIndex: integer expected"; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + if (!$util.isString(message.jsonName)) + return "jsonName: string expected"; + if (message.options != null && message.hasOwnProperty("options")) 
{ + var error = $root.google.protobuf.FieldOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + */ + FieldDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldDescriptorProto) + return object; + var message = new $root.google.protobuf.FieldDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + switch (object.label) { + case "LABEL_OPTIONAL": + case 1: + message.label = 1; + break; + case "LABEL_REQUIRED": + case 2: + message.label = 2; + break; + case "LABEL_REPEATED": + case 3: + message.label = 3; + break; + } + switch (object.type) { + case "TYPE_DOUBLE": + case 1: + message.type = 1; + break; + case "TYPE_FLOAT": + case 2: + message.type = 2; + break; + case "TYPE_INT64": + case 3: + message.type = 3; + break; + case "TYPE_UINT64": + case 4: + message.type = 4; + break; + case "TYPE_INT32": + case 5: + message.type = 5; + break; + case "TYPE_FIXED64": + case 6: + message.type = 6; + break; + case "TYPE_FIXED32": + case 7: + message.type = 7; + break; + case "TYPE_BOOL": + case 8: + message.type = 8; + break; + case "TYPE_STRING": + case 9: + message.type = 9; + break; + case "TYPE_GROUP": + case 10: + message.type = 10; + break; + case "TYPE_MESSAGE": + case 11: + message.type = 11; + break; + case "TYPE_BYTES": + case 12: + message.type = 12; + break; + case "TYPE_UINT32": + case 13: + message.type = 13; + break; + case "TYPE_ENUM": + case 14: + message.type = 14; + break; + case "TYPE_SFIXED32": + case 15: + message.type = 15; + break; 
+ case "TYPE_SFIXED64": + case 16: + message.type = 16; + break; + case "TYPE_SINT32": + case 17: + message.type = 17; + break; + case "TYPE_SINT64": + case 18: + message.type = 18; + break; + } + if (object.typeName != null) + message.typeName = String(object.typeName); + if (object.extendee != null) + message.extendee = String(object.extendee); + if (object.defaultValue != null) + message.defaultValue = String(object.defaultValue); + if (object.oneofIndex != null) + message.oneofIndex = object.oneofIndex | 0; + if (object.jsonName != null) + message.jsonName = String(object.jsonName); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.FieldDescriptorProto} message FieldDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.extendee = ""; + object.number = 0; + object.label = options.enums === String ? "LABEL_OPTIONAL" : 1; + object.type = options.enums === String ? 
"TYPE_DOUBLE" : 1; + object.typeName = ""; + object.defaultValue = ""; + object.options = null; + object.oneofIndex = 0; + object.jsonName = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.extendee != null && message.hasOwnProperty("extendee")) + object.extendee = message.extendee; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.label != null && message.hasOwnProperty("label")) + object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; + if (message.typeName != null && message.hasOwnProperty("typeName")) + object.typeName = message.typeName; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + object.defaultValue = message.defaultValue; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FieldOptions.toObject(message.options, options); + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + object.oneofIndex = message.oneofIndex; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + object.jsonName = message.jsonName; + return object; + }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.FieldDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FieldDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Type enum. 
+ * @name google.protobuf.FieldDescriptorProto.Type
+ * @enum {number}
+ * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value
+ * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value
+ * @property {number} TYPE_INT64=3 TYPE_INT64 value
+ * @property {number} TYPE_UINT64=4 TYPE_UINT64 value
+ * @property {number} TYPE_INT32=5 TYPE_INT32 value
+ * @property {number} TYPE_FIXED64=6 TYPE_FIXED64 value
+ * @property {number} TYPE_FIXED32=7 TYPE_FIXED32 value
+ * @property {number} TYPE_BOOL=8 TYPE_BOOL value
+ * @property {number} TYPE_STRING=9 TYPE_STRING value
+ * @property {number} TYPE_GROUP=10 TYPE_GROUP value
+ * @property {number} TYPE_MESSAGE=11 TYPE_MESSAGE value
+ * @property {number} TYPE_BYTES=12 TYPE_BYTES value
+ * @property {number} TYPE_UINT32=13 TYPE_UINT32 value
+ * @property {number} TYPE_ENUM=14 TYPE_ENUM value
+ * @property {number} TYPE_SFIXED32=15 TYPE_SFIXED32 value
+ * @property {number} TYPE_SFIXED64=16 TYPE_SFIXED64 value
+ * @property {number} TYPE_SINT32=17 TYPE_SINT32 value
+ * @property {number} TYPE_SINT64=18 TYPE_SINT64 value
+ */
+ FieldDescriptorProto.Type = (function() {
+ var valuesById = {}, values = Object.create(valuesById);
+ values[valuesById[1] = "TYPE_DOUBLE"] = 1;
+ values[valuesById[2] = "TYPE_FLOAT"] = 2;
+ values[valuesById[3] = "TYPE_INT64"] = 3;
+ values[valuesById[4] = "TYPE_UINT64"] = 4;
+ values[valuesById[5] = "TYPE_INT32"] = 5;
+ values[valuesById[6] = "TYPE_FIXED64"] = 6;
+ values[valuesById[7] = "TYPE_FIXED32"] = 7;
+ values[valuesById[8] = "TYPE_BOOL"] = 8;
+ values[valuesById[9] = "TYPE_STRING"] = 9;
+ values[valuesById[10] = "TYPE_GROUP"] = 10;
+ values[valuesById[11] = "TYPE_MESSAGE"] = 11;
+ values[valuesById[12] = "TYPE_BYTES"] = 12;
+ values[valuesById[13] = "TYPE_UINT32"] = 13;
+ values[valuesById[14] = "TYPE_ENUM"] = 14;
+ values[valuesById[15] = "TYPE_SFIXED32"] = 15;
+ values[valuesById[16] = "TYPE_SFIXED64"] = 16;
+ values[valuesById[17] = "TYPE_SINT32"] = 17;
+ values[valuesById[18] = "TYPE_SINT64"] = 18;
+ return values;
+ })();
+
+ /**
+ * Label enum.
+ * @name google.protobuf.FieldDescriptorProto.Label
+ * @enum {number}
+ * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value
+ * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value
+ * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value
+ */
+ FieldDescriptorProto.Label = (function() {
+ var valuesById = {}, values = Object.create(valuesById);
+ values[valuesById[1] = "LABEL_OPTIONAL"] = 1;
+ values[valuesById[2] = "LABEL_REQUIRED"] = 2;
+ values[valuesById[3] = "LABEL_REPEATED"] = 3;
+ return values;
+ })();
+
+ return FieldDescriptorProto;
+ })();
+
+ protobuf.OneofDescriptorProto = (function() {
+
+ /**
+ * Properties of an OneofDescriptorProto.
+ * @memberof google.protobuf
+ * @interface IOneofDescriptorProto
+ * @property {string|null} [name] OneofDescriptorProto name
+ * @property {google.protobuf.IOneofOptions|null} [options] OneofDescriptorProto options
+ */
+
+ /**
+ * Constructs a new OneofDescriptorProto.
+ * @memberof google.protobuf
+ * @classdesc Represents an OneofDescriptorProto.
+ * @implements IOneofDescriptorProto
+ * @constructor
+ * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set
+ */
+ function OneofDescriptorProto(properties) {
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * OneofDescriptorProto name.
+ * @member {string} name
+ * @memberof google.protobuf.OneofDescriptorProto
+ * @instance
+ */
+ OneofDescriptorProto.prototype.name = "";
+
+ /**
+ * OneofDescriptorProto options.
+ * @member {google.protobuf.IOneofOptions|null|undefined} options
+ * @memberof google.protobuf.OneofDescriptorProto
+ * @instance
+ */
+ OneofDescriptorProto.prototype.options = null;
+
+ /**
+ * Creates a new OneofDescriptorProto instance using the specified properties.
+ * @function create + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto instance + */ + OneofDescriptorProto.create = function create(properties) { + return new OneofDescriptorProto(properties); + }; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an OneofDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + OneofDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.OneofOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + */ + OneofDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofDescriptorProto) + return object; + var message = new $root.google.protobuf.OneofDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.OneofOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.OneofDescriptorProto} message OneofDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + OneofDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.OneofOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.OneofDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + OneofDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return OneofDescriptorProto; + })(); + + protobuf.EnumDescriptorProto = (function() { + + /** + * Properties of an EnumDescriptorProto. + * @memberof google.protobuf + * @interface IEnumDescriptorProto + * @property {string|null} [name] EnumDescriptorProto name + * @property {Array.|null} [value] EnumDescriptorProto value + * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options + * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange + * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + */ + + /** + * Constructs a new EnumDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumDescriptorProto. 
+ * @implements IEnumDescriptorProto + * @constructor + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + */ + function EnumDescriptorProto(properties) { + this.value = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.name = ""; + + /** + * EnumDescriptorProto value. + * @member {Array.} value + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.value = $util.emptyArray; + + /** + * EnumDescriptorProto options. + * @member {google.protobuf.IEnumOptions|null|undefined} options + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.options = null; + + /** + * EnumDescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.reservedRange = $util.emptyArray; + + /** + * EnumDescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto instance + */ + EnumDescriptorProto.create = function create(properties) { + return new EnumDescriptorProto(properties); + }; + + /** + * Encodes the specified EnumDescriptorProto message. 
Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.value != null && message.value.length) + for (var i = 0; i < message.value.length; ++i) + $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + return writer; + }; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + if (!(message.value && message.value.length)) + message.value = []; + message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.value != null && message.hasOwnProperty("value")) { + if (!Array.isArray(message.value)) + return "value: array expected"; + for (var i = 0; i < message.value.length; ++i) { + var error = $root.google.protobuf.EnumValueDescriptorProto.verify(message.value[i]); + if (error) + return "value." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + */ + EnumDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.value) { + if (!Array.isArray(object.value)) + throw TypeError(".google.protobuf.EnumDescriptorProto.value: array expected"); + message.value = []; + for (var i = 0; i < object.value.length; ++i) { + if (typeof object.value[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.value: object expected"); + message.value[i] = $root.google.protobuf.EnumValueDescriptorProto.fromObject(object.value[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.EnumOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + 
} + return message; + }; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.EnumDescriptorProto} message EnumDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.value = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.value && message.value.length) { + object.value = []; + for (var j = 0; j < message.value.length; ++j) + object.value[j] = $root.google.protobuf.EnumValueDescriptorProto.toObject(message.value[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumOptions.toObject(message.options, options); + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; + }; + + /** + * Converts this EnumDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + EnumDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + EnumDescriptorProto.EnumReservedRange = (function() { + + /** + * Properties of an EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @interface IEnumReservedRange + * @property {number|null} [start] EnumReservedRange start + * @property {number|null} [end] EnumReservedRange end + */ + + /** + * Constructs a new EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @classdesc Represents an EnumReservedRange. + * @implements IEnumReservedRange + * @constructor + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + */ + function EnumReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumReservedRange start. + * @member {number} start + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.start = 0; + + /** + * EnumReservedRange end. + * @member {number} end + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.end = 0; + + /** + * Creates a new EnumReservedRange instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange instance + */ + EnumReservedRange.create = function create(properties) { + return new EnumReservedRange(properties); + }; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @function encode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && message.hasOwnProperty("start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && message.hasOwnProperty("end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumReservedRange message. + * @function verify + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + */ + EnumReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto.EnumReservedRange) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.EnumReservedRange} message EnumReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this EnumReservedRange to JSON. + * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + * @returns {Object.} JSON object + */ + EnumReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return EnumReservedRange; + })(); + + return EnumDescriptorProto; + })(); + + protobuf.EnumValueDescriptorProto = (function() { + + /** + * Properties of an EnumValueDescriptorProto. 
+ * @memberof google.protobuf + * @interface IEnumValueDescriptorProto + * @property {string|null} [name] EnumValueDescriptorProto name + * @property {number|null} [number] EnumValueDescriptorProto number + * @property {google.protobuf.IEnumValueOptions|null} [options] EnumValueDescriptorProto options + */ + + /** + * Constructs a new EnumValueDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumValueDescriptorProto. + * @implements IEnumValueDescriptorProto + * @constructor + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set + */ + function EnumValueDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumValueDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.name = ""; + + /** + * EnumValueDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.number = 0; + + /** + * EnumValueDescriptorProto options. + * @member {google.protobuf.IEnumValueOptions|null|undefined} options + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.options = null; + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto instance + */ + EnumValueDescriptorProto.create = function create(properties) { + return new EnumValueDescriptorProto(properties); + }; + + /** + * Encodes the specified EnumValueDescriptorProto message. 
Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.number != null && message.hasOwnProperty("number")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumValueDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumValueDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumValueOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + */ + EnumValueDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueDescriptorProto) + return object; + var message = new $root.google.protobuf.EnumValueDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.EnumValueOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.EnumValueDescriptorProto} message EnumValueDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumValueDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.number = 0; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumValueOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + EnumValueDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return EnumValueDescriptorProto; + })(); + + protobuf.ServiceDescriptorProto = (function() { + + /** + * Properties of a ServiceDescriptorProto. + * @memberof google.protobuf + * @interface IServiceDescriptorProto + * @property {string|null} [name] ServiceDescriptorProto name + * @property {Array.|null} [method] ServiceDescriptorProto method + * @property {google.protobuf.IServiceOptions|null} [options] ServiceDescriptorProto options + */ + + /** + * Constructs a new ServiceDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a ServiceDescriptorProto. 
+ * @implements IServiceDescriptorProto + * @constructor + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + */ + function ServiceDescriptorProto(properties) { + this.method = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ServiceDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.name = ""; + + /** + * ServiceDescriptorProto method. + * @member {Array.} method + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.method = $util.emptyArray; + + /** + * ServiceDescriptorProto options. + * @member {google.protobuf.IServiceOptions|null|undefined} options + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.options = null; + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto instance + */ + ServiceDescriptorProto.create = function create(properties) { + return new ServiceDescriptorProto(properties); + }; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.method != null && message.method.length) + for (var i = 0; i < message.method.length; ++i) + $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + if (!(message.method && message.method.length)) + message.method = []; + message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ServiceDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ServiceDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.method != null && message.hasOwnProperty("method")) { + if (!Array.isArray(message.method)) + return "method: array expected"; + for (var i = 0; i < message.method.length; ++i) { + var error = $root.google.protobuf.MethodDescriptorProto.verify(message.method[i]); + if (error) + return "method." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ServiceOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + */ + ServiceDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceDescriptorProto) + return object; + var message = new $root.google.protobuf.ServiceDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.method) { + if (!Array.isArray(object.method)) + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: array expected"); + message.method = []; + for (var i = 0; i < object.method.length; ++i) { + if (typeof object.method[i] !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: object expected"); + message.method[i] = $root.google.protobuf.MethodDescriptorProto.fromObject(object.method[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.ServiceOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.ServiceDescriptorProto} message ServiceDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ServiceDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.method = []; + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.method && message.method.length) { + object.method = []; + for (var j = 0; j < message.method.length; ++j) + object.method[j] = $root.google.protobuf.MethodDescriptorProto.toObject(message.method[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ServiceOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + ServiceDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ServiceDescriptorProto; + })(); + + protobuf.MethodDescriptorProto = (function() { + + /** + * Properties of a MethodDescriptorProto. 
+ * @memberof google.protobuf + * @interface IMethodDescriptorProto + * @property {string|null} [name] MethodDescriptorProto name + * @property {string|null} [inputType] MethodDescriptorProto inputType + * @property {string|null} [outputType] MethodDescriptorProto outputType + * @property {google.protobuf.IMethodOptions|null} [options] MethodDescriptorProto options + * @property {boolean|null} [clientStreaming] MethodDescriptorProto clientStreaming + * @property {boolean|null} [serverStreaming] MethodDescriptorProto serverStreaming + */ + + /** + * Constructs a new MethodDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a MethodDescriptorProto. + * @implements IMethodDescriptorProto + * @constructor + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set + */ + function MethodDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MethodDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.name = ""; + + /** + * MethodDescriptorProto inputType. + * @member {string} inputType + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.inputType = ""; + + /** + * MethodDescriptorProto outputType. + * @member {string} outputType + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.outputType = ""; + + /** + * MethodDescriptorProto options. + * @member {google.protobuf.IMethodOptions|null|undefined} options + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.options = null; + + /** + * MethodDescriptorProto clientStreaming. 
+ * @member {boolean} clientStreaming + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.clientStreaming = false; + + /** + * MethodDescriptorProto serverStreaming. + * @member {boolean} serverStreaming + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.serverStreaming = false; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto instance + */ + MethodDescriptorProto.create = function create(properties) { + return new MethodDescriptorProto(properties); + }; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.inputType != null && message.hasOwnProperty("inputType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); + if (message.outputType != null && message.hasOwnProperty("outputType")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); + if (message.options != null && message.hasOwnProperty("options")) + $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); + return writer; + }; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MethodDescriptorProto message. + * @function verify + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MethodDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.inputType != null && message.hasOwnProperty("inputType")) + if (!$util.isString(message.inputType)) + return "inputType: string expected"; + if (message.outputType != null && message.hasOwnProperty("outputType")) + if (!$util.isString(message.outputType)) + return "outputType: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MethodOptions.verify(message.options); + if (error) + return "options." 
+ error; + } + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + if (typeof message.clientStreaming !== "boolean") + return "clientStreaming: boolean expected"; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + if (typeof message.serverStreaming !== "boolean") + return "serverStreaming: boolean expected"; + return null; + }; + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + */ + MethodDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodDescriptorProto) + return object; + var message = new $root.google.protobuf.MethodDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.inputType != null) + message.inputType = String(object.inputType); + if (object.outputType != null) + message.outputType = String(object.outputType); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MethodOptions.fromObject(object.options); + } + if (object.clientStreaming != null) + message.clientStreaming = Boolean(object.clientStreaming); + if (object.serverStreaming != null) + message.serverStreaming = Boolean(object.serverStreaming); + return message; + }; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.MethodDescriptorProto} message MethodDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MethodDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.inputType = ""; + object.outputType = ""; + object.options = null; + object.clientStreaming = false; + object.serverStreaming = false; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.inputType != null && message.hasOwnProperty("inputType")) + object.inputType = message.inputType; + if (message.outputType != null && message.hasOwnProperty("outputType")) + object.outputType = message.outputType; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MethodOptions.toObject(message.options, options); + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + object.clientStreaming = message.clientStreaming; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + object.serverStreaming = message.serverStreaming; + return object; + }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.MethodDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + MethodDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return MethodDescriptorProto; + })(); + + protobuf.FileOptions = (function() { + + /** + * Properties of a FileOptions. 
+ * @memberof google.protobuf + * @interface IFileOptions + * @property {string|null} [javaPackage] FileOptions javaPackage + * @property {string|null} [javaOuterClassname] FileOptions javaOuterClassname + * @property {boolean|null} [javaMultipleFiles] FileOptions javaMultipleFiles + * @property {boolean|null} [javaGenerateEqualsAndHash] FileOptions javaGenerateEqualsAndHash + * @property {boolean|null} [javaStringCheckUtf8] FileOptions javaStringCheckUtf8 + * @property {google.protobuf.FileOptions.OptimizeMode|null} [optimizeFor] FileOptions optimizeFor + * @property {string|null} [goPackage] FileOptions goPackage + * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices + * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices + * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices + * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices + * @property {boolean|null} [deprecated] FileOptions deprecated + * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas + * @property {string|null} [objcClassPrefix] FileOptions objcClassPrefix + * @property {string|null} [csharpNamespace] FileOptions csharpNamespace + * @property {string|null} [swiftPrefix] FileOptions swiftPrefix + * @property {string|null} [phpClassPrefix] FileOptions phpClassPrefix + * @property {string|null} [phpNamespace] FileOptions phpNamespace + * @property {string|null} [phpMetadataNamespace] FileOptions phpMetadataNamespace + * @property {string|null} [rubyPackage] FileOptions rubyPackage + * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption + * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition + */ + + /** + * Constructs a new FileOptions. + * @memberof google.protobuf + * @classdesc Represents a FileOptions. 
+ * @implements IFileOptions + * @constructor + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + */ + function FileOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.resourceDefinition"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileOptions javaPackage. + * @member {string} javaPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaPackage = ""; + + /** + * FileOptions javaOuterClassname. + * @member {string} javaOuterClassname + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaOuterClassname = ""; + + /** + * FileOptions javaMultipleFiles. + * @member {boolean} javaMultipleFiles + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaMultipleFiles = false; + + /** + * FileOptions javaGenerateEqualsAndHash. + * @member {boolean} javaGenerateEqualsAndHash + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenerateEqualsAndHash = false; + + /** + * FileOptions javaStringCheckUtf8. + * @member {boolean} javaStringCheckUtf8 + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaStringCheckUtf8 = false; + + /** + * FileOptions optimizeFor. + * @member {google.protobuf.FileOptions.OptimizeMode} optimizeFor + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.optimizeFor = 1; + + /** + * FileOptions goPackage. + * @member {string} goPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.goPackage = ""; + + /** + * FileOptions ccGenericServices. 
+ * @member {boolean} ccGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.ccGenericServices = false; + + /** + * FileOptions javaGenericServices. + * @member {boolean} javaGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenericServices = false; + + /** + * FileOptions pyGenericServices. + * @member {boolean} pyGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.pyGenericServices = false; + + /** + * FileOptions phpGenericServices. + * @member {boolean} phpGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpGenericServices = false; + + /** + * FileOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.deprecated = false; + + /** + * FileOptions ccEnableArenas. + * @member {boolean} ccEnableArenas + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.ccEnableArenas = false; + + /** + * FileOptions objcClassPrefix. + * @member {string} objcClassPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.objcClassPrefix = ""; + + /** + * FileOptions csharpNamespace. + * @member {string} csharpNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.csharpNamespace = ""; + + /** + * FileOptions swiftPrefix. + * @member {string} swiftPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.swiftPrefix = ""; + + /** + * FileOptions phpClassPrefix. + * @member {string} phpClassPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpClassPrefix = ""; + + /** + * FileOptions phpNamespace. 
+ * @member {string} phpNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpNamespace = ""; + + /** + * FileOptions phpMetadataNamespace. + * @member {string} phpMetadataNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpMetadataNamespace = ""; + + /** + * FileOptions rubyPackage. + * @member {string} rubyPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.rubyPackage = ""; + + /** + * FileOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * FileOptions .google.api.resourceDefinition. + * @member {Array.} .google.api.resourceDefinition + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype[".google.api.resourceDefinition"] = $util.emptyArray; + + /** + * Creates a new FileOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + * @returns {google.protobuf.FileOptions} FileOptions instance + */ + FileOptions.create = function create(properties) { + return new FileOptions(properties); + }; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + writer.uint32(/* id 17, wireType 0 =*/136).bool(message.javaGenericServices); + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); + if 
(message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resourceDefinition"] != null && 
message[".google.api.resourceDefinition"].length) + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resourceDefinition"][i], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FileOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileOptions} FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32(); + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1053: + if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) + message[".google.api.resourceDefinition"] = []; + message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); + break; + default: + 
reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileOptions} FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileOptions message. + * @function verify + * @memberof google.protobuf.FileOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + if (!$util.isString(message.javaPackage)) + return "javaPackage: string expected"; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + if (!$util.isString(message.javaOuterClassname)) + return "javaOuterClassname: string expected"; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + if (typeof message.javaMultipleFiles !== "boolean") + return "javaMultipleFiles: boolean expected"; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + if (typeof message.javaGenerateEqualsAndHash !== "boolean") + return "javaGenerateEqualsAndHash: boolean expected"; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + if (typeof message.javaStringCheckUtf8 
!== "boolean") + return "javaStringCheckUtf8: boolean expected"; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + switch (message.optimizeFor) { + default: + return "optimizeFor: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + if (!$util.isString(message.goPackage)) + return "goPackage: string expected"; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + if (typeof message.ccGenericServices !== "boolean") + return "ccGenericServices: boolean expected"; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + if (typeof message.javaGenericServices !== "boolean") + return "javaGenericServices: boolean expected"; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + if (typeof message.pyGenericServices !== "boolean") + return "pyGenericServices: boolean expected"; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + if (typeof message.phpGenericServices !== "boolean") + return "phpGenericServices: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + if (typeof message.ccEnableArenas !== "boolean") + return "ccEnableArenas: boolean expected"; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + if (!$util.isString(message.objcClassPrefix)) + return "objcClassPrefix: string expected"; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + if (!$util.isString(message.csharpNamespace)) + return "csharpNamespace: string expected"; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + if 
(!$util.isString(message.swiftPrefix)) + return "swiftPrefix: string expected"; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + if (!$util.isString(message.phpClassPrefix)) + return "phpClassPrefix: string expected"; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + if (!$util.isString(message.phpNamespace)) + return "phpNamespace: string expected"; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + if (!$util.isString(message.phpMetadataNamespace)) + return "phpMetadataNamespace: string expected"; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + if (!$util.isString(message.rubyPackage)) + return "rubyPackage: string expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.resourceDefinition"] != null && message.hasOwnProperty(".google.api.resourceDefinition")) { + if (!Array.isArray(message[".google.api.resourceDefinition"])) + return ".google.api.resourceDefinition: array expected"; + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resourceDefinition"][i]); + if (error) + return ".google.api.resourceDefinition." + error; + } + } + return null; + }; + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileOptions} FileOptions + */ + FileOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileOptions) + return object; + var message = new $root.google.protobuf.FileOptions(); + if (object.javaPackage != null) + message.javaPackage = String(object.javaPackage); + if (object.javaOuterClassname != null) + message.javaOuterClassname = String(object.javaOuterClassname); + if (object.javaMultipleFiles != null) + message.javaMultipleFiles = Boolean(object.javaMultipleFiles); + if (object.javaGenerateEqualsAndHash != null) + message.javaGenerateEqualsAndHash = Boolean(object.javaGenerateEqualsAndHash); + if (object.javaStringCheckUtf8 != null) + message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); + switch (object.optimizeFor) { + case "SPEED": + case 1: + message.optimizeFor = 1; + break; + case "CODE_SIZE": + case 2: + message.optimizeFor = 2; + break; + case "LITE_RUNTIME": + case 3: + message.optimizeFor = 3; + break; + } + if (object.goPackage != null) + message.goPackage = String(object.goPackage); + if (object.ccGenericServices != null) + message.ccGenericServices = Boolean(object.ccGenericServices); + if (object.javaGenericServices != null) + message.javaGenericServices = Boolean(object.javaGenericServices); + if (object.pyGenericServices != null) + message.pyGenericServices = Boolean(object.pyGenericServices); + if (object.phpGenericServices != null) + message.phpGenericServices = Boolean(object.phpGenericServices); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.ccEnableArenas != null) + message.ccEnableArenas = Boolean(object.ccEnableArenas); + if (object.objcClassPrefix != null) + message.objcClassPrefix = String(object.objcClassPrefix); + if (object.csharpNamespace != null) + message.csharpNamespace = 
String(object.csharpNamespace); + if (object.swiftPrefix != null) + message.swiftPrefix = String(object.swiftPrefix); + if (object.phpClassPrefix != null) + message.phpClassPrefix = String(object.phpClassPrefix); + if (object.phpNamespace != null) + message.phpNamespace = String(object.phpNamespace); + if (object.phpMetadataNamespace != null) + message.phpMetadataNamespace = String(object.phpMetadataNamespace); + if (object.rubyPackage != null) + message.rubyPackage = String(object.rubyPackage); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.resourceDefinition"]) { + if (!Array.isArray(object[".google.api.resourceDefinition"])) + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: array expected"); + message[".google.api.resourceDefinition"] = []; + for (var i = 0; i < object[".google.api.resourceDefinition"].length; ++i) { + if (typeof object[".google.api.resourceDefinition"][i] !== "object") + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: object expected"); + message[".google.api.resourceDefinition"][i] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resourceDefinition"][i]); + } + } + return message; + }; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.FileOptions} message FileOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.resourceDefinition"] = []; + } + if (options.defaults) { + object.javaPackage = ""; + object.javaOuterClassname = ""; + object.optimizeFor = options.enums === String ? "SPEED" : 1; + object.javaMultipleFiles = false; + object.goPackage = ""; + object.ccGenericServices = false; + object.javaGenericServices = false; + object.pyGenericServices = false; + object.javaGenerateEqualsAndHash = false; + object.deprecated = false; + object.javaStringCheckUtf8 = false; + object.ccEnableArenas = false; + object.objcClassPrefix = ""; + object.csharpNamespace = ""; + object.swiftPrefix = ""; + object.phpClassPrefix = ""; + object.phpNamespace = ""; + object.phpGenericServices = false; + object.phpMetadataNamespace = ""; + object.rubyPackage = ""; + } + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + object.javaPackage = message.javaPackage; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + object.javaOuterClassname = message.javaOuterClassname; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + object.optimizeFor = options.enums === String ? 
$root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + object.javaMultipleFiles = message.javaMultipleFiles; + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + object.goPackage = message.goPackage; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + object.ccGenericServices = message.ccGenericServices; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + object.javaGenericServices = message.javaGenericServices; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + object.pyGenericServices = message.pyGenericServices; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + object.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + object.javaStringCheckUtf8 = message.javaStringCheckUtf8; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + object.ccEnableArenas = message.ccEnableArenas; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + object.objcClassPrefix = message.objcClassPrefix; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + object.csharpNamespace = message.csharpNamespace; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + object.swiftPrefix = message.swiftPrefix; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + object.phpClassPrefix = message.phpClassPrefix; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + object.phpNamespace = 
message.phpNamespace; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + object.phpGenericServices = message.phpGenericServices; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + object.phpMetadataNamespace = message.phpMetadataNamespace; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + object.rubyPackage = message.rubyPackage; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length) { + object[".google.api.resourceDefinition"] = []; + for (var j = 0; j < message[".google.api.resourceDefinition"].length; ++j) + object[".google.api.resourceDefinition"][j] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resourceDefinition"][j], options); + } + return object; + }; + + /** + * Converts this FileOptions to JSON. + * @function toJSON + * @memberof google.protobuf.FileOptions + * @instance + * @returns {Object.} JSON object + */ + FileOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * OptimizeMode enum. 
+ * @name google.protobuf.FileOptions.OptimizeMode + * @enum {string} + * @property {number} SPEED=1 SPEED value + * @property {number} CODE_SIZE=2 CODE_SIZE value + * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value + */ + FileOptions.OptimizeMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "SPEED"] = 1; + values[valuesById[2] = "CODE_SIZE"] = 2; + values[valuesById[3] = "LITE_RUNTIME"] = 3; + return values; + })(); + + return FileOptions; + })(); + + protobuf.MessageOptions = (function() { + + /** + * Properties of a MessageOptions. + * @memberof google.protobuf + * @interface IMessageOptions + * @property {boolean|null} [messageSetWireFormat] MessageOptions messageSetWireFormat + * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor + * @property {boolean|null} [deprecated] MessageOptions deprecated + * @property {boolean|null} [mapEntry] MessageOptions mapEntry + * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption + * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource + */ + + /** + * Constructs a new MessageOptions. + * @memberof google.protobuf + * @classdesc Represents a MessageOptions. + * @implements IMessageOptions + * @constructor + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + */ + function MessageOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MessageOptions messageSetWireFormat. + * @member {boolean} messageSetWireFormat + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.messageSetWireFormat = false; + + /** + * MessageOptions noStandardDescriptorAccessor. 
+ * @member {boolean} noStandardDescriptorAccessor + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.noStandardDescriptorAccessor = false; + + /** + * MessageOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.deprecated = false; + + /** + * MessageOptions mapEntry. + * @member {boolean} mapEntry + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.mapEntry = false; + + /** + * MessageOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * MessageOptions .google.api.resource. + * @member {google.api.IResourceDescriptor|null|undefined} .google.api.resource + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype[".google.api.resource"] = null; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + * @returns {google.protobuf.MessageOptions} MessageOptions instance + */ + MessageOptions.create = function create(properties) { + return new MessageOptions(properties); + }; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MessageOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MessageOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MessageOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MessageOptions} MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MessageOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1053: + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.MessageOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MessageOptions} MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MessageOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MessageOptions message. 
+ * @function verify + * @memberof google.protobuf.MessageOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MessageOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + if (typeof message.messageSetWireFormat !== "boolean") + return "messageSetWireFormat: boolean expected"; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + if (typeof message.noStandardDescriptorAccessor !== "boolean") + return "noStandardDescriptorAccessor: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + if (typeof message.mapEntry !== "boolean") + return "mapEntry: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resource"]); + if (error) + return ".google.api.resource." + error; + } + return null; + }; + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MessageOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MessageOptions} MessageOptions + */ + MessageOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MessageOptions) + return object; + var message = new $root.google.protobuf.MessageOptions(); + if (object.messageSetWireFormat != null) + message.messageSetWireFormat = Boolean(object.messageSetWireFormat); + if (object.noStandardDescriptorAccessor != null) + message.noStandardDescriptorAccessor = Boolean(object.noStandardDescriptorAccessor); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.mapEntry != null) + message.mapEntry = Boolean(object.mapEntry); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.resource"] != null) { + if (typeof object[".google.api.resource"] !== "object") + throw TypeError(".google.protobuf.MessageOptions..google.api.resource: object expected"); + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resource"]); + } + return message; + }; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.MessageOptions} message MessageOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MessageOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.messageSetWireFormat = false; + object.noStandardDescriptorAccessor = false; + object.deprecated = false; + object.mapEntry = false; + object[".google.api.resource"] = null; + } + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + object.messageSetWireFormat = message.messageSetWireFormat; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + object.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + object.mapEntry = message.mapEntry; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + object[".google.api.resource"] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resource"], options); + return object; + }; + + /** + * Converts this MessageOptions to JSON. 
+ * @function toJSON + * @memberof google.protobuf.MessageOptions + * @instance + * @returns {Object.} JSON object + */ + MessageOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return MessageOptions; + })(); + + protobuf.FieldOptions = (function() { + + /** + * Properties of a FieldOptions. + * @memberof google.protobuf + * @interface IFieldOptions + * @property {google.protobuf.FieldOptions.CType|null} [ctype] FieldOptions ctype + * @property {boolean|null} [packed] FieldOptions packed + * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype + * @property {boolean|null} [lazy] FieldOptions lazy + * @property {boolean|null} [deprecated] FieldOptions deprecated + * @property {boolean|null} [weak] FieldOptions weak + * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption + * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior + * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference + */ + + /** + * Constructs a new FieldOptions. + * @memberof google.protobuf + * @classdesc Represents a FieldOptions. + * @implements IFieldOptions + * @constructor + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + */ + function FieldOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.fieldBehavior"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldOptions ctype. + * @member {google.protobuf.FieldOptions.CType} ctype + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.ctype = 0; + + /** + * FieldOptions packed. 
+ * @member {boolean} packed + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.packed = false; + + /** + * FieldOptions jstype. + * @member {google.protobuf.FieldOptions.JSType} jstype + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.jstype = 0; + + /** + * FieldOptions lazy. + * @member {boolean} lazy + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.lazy = false; + + /** + * FieldOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.deprecated = false; + + /** + * FieldOptions weak. + * @member {boolean} weak + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.weak = false; + + /** + * FieldOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * FieldOptions .google.api.fieldBehavior. + * @member {Array.} .google.api.fieldBehavior + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.api.fieldBehavior"] = $util.emptyArray; + + /** + * FieldOptions .google.api.resourceReference. + * @member {google.api.IResourceReference|null|undefined} .google.api.resourceReference + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.api.resourceReference"] = null; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions} FieldOptions instance + */ + FieldOptions.create = function create(properties) { + return new FieldOptions(properties); + }; + + /** + * Encodes the specified FieldOptions message. 
Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.ctype != null && message.hasOwnProperty("ctype")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); + if (message.packed != null && message.hasOwnProperty("packed")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.lazy != null && message.hasOwnProperty("lazy")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); + if (message.jstype != null && message.hasOwnProperty("jstype")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); + if (message.weak != null && message.hasOwnProperty("weak")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { + writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + writer.int32(message[".google.api.fieldBehavior"][i]); + writer.ldelim(); + } + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + 
$root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions} FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32(); + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32(); + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1052: + if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) + message[".google.api.fieldBehavior"] = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message[".google.api.fieldBehavior"].push(reader.int32()); + } else + message[".google.api.fieldBehavior"].push(reader.int32()); + break; + case 1055: + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FieldOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions} FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldOptions message. + * @function verify + * @memberof google.protobuf.FieldOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.ctype != null && message.hasOwnProperty("ctype")) + switch (message.ctype) { + default: + return "ctype: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.packed != null && message.hasOwnProperty("packed")) + if (typeof message.packed !== "boolean") + return "packed: boolean expected"; + if (message.jstype != null && message.hasOwnProperty("jstype")) + switch (message.jstype) { + default: + return "jstype: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.lazy != null && message.hasOwnProperty("lazy")) + if (typeof message.lazy !== "boolean") + return "lazy: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.weak != null && message.hasOwnProperty("weak")) + if (typeof message.weak !== "boolean") + return "weak: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + 
if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { + if (!Array.isArray(message[".google.api.fieldBehavior"])) + return ".google.api.fieldBehavior: array expected"; + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + switch (message[".google.api.fieldBehavior"][i]) { + default: + return ".google.api.fieldBehavior: enum value[] expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + break; + } + } + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) { + var error = $root.google.api.ResourceReference.verify(message[".google.api.resourceReference"]); + if (error) + return ".google.api.resourceReference." + error; + } + return null; + }; + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FieldOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions} FieldOptions + */ + FieldOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions) + return object; + var message = new $root.google.protobuf.FieldOptions(); + switch (object.ctype) { + case "STRING": + case 0: + message.ctype = 0; + break; + case "CORD": + case 1: + message.ctype = 1; + break; + case "STRING_PIECE": + case 2: + message.ctype = 2; + break; + } + if (object.packed != null) + message.packed = Boolean(object.packed); + switch (object.jstype) { + case "JS_NORMAL": + case 0: + message.jstype = 0; + break; + case "JS_STRING": + case 1: + message.jstype = 1; + break; + case "JS_NUMBER": + case 2: + message.jstype = 2; + break; + } + if (object.lazy != null) + message.lazy = Boolean(object.lazy); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.weak != null) + message.weak = Boolean(object.weak); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.fieldBehavior"]) { + if (!Array.isArray(object[".google.api.fieldBehavior"])) + throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); + message[".google.api.fieldBehavior"] = []; + for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) + switch (object[".google.api.fieldBehavior"][i]) { + 
default: + case "FIELD_BEHAVIOR_UNSPECIFIED": + case 0: + message[".google.api.fieldBehavior"][i] = 0; + break; + case "OPTIONAL": + case 1: + message[".google.api.fieldBehavior"][i] = 1; + break; + case "REQUIRED": + case 2: + message[".google.api.fieldBehavior"][i] = 2; + break; + case "OUTPUT_ONLY": + case 3: + message[".google.api.fieldBehavior"][i] = 3; + break; + case "INPUT_ONLY": + case 4: + message[".google.api.fieldBehavior"][i] = 4; + break; + case "IMMUTABLE": + case 5: + message[".google.api.fieldBehavior"][i] = 5; + break; + } + } + if (object[".google.api.resourceReference"] != null) { + if (typeof object[".google.api.resourceReference"] !== "object") + throw TypeError(".google.protobuf.FieldOptions..google.api.resourceReference: object expected"); + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.fromObject(object[".google.api.resourceReference"]); + } + return message; + }; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.FieldOptions} message FieldOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.fieldBehavior"] = []; + } + if (options.defaults) { + object.ctype = options.enums === String ? "STRING" : 0; + object.packed = false; + object.deprecated = false; + object.lazy = false; + object.jstype = options.enums === String ? "JS_NORMAL" : 0; + object.weak = false; + object[".google.api.resourceReference"] = null; + } + if (message.ctype != null && message.hasOwnProperty("ctype")) + object.ctype = options.enums === String ? 
$root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; + if (message.packed != null && message.hasOwnProperty("packed")) + object.packed = message.packed; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.lazy != null && message.hasOwnProperty("lazy")) + object.lazy = message.lazy; + if (message.jstype != null && message.hasOwnProperty("jstype")) + object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; + if (message.weak != null && message.hasOwnProperty("weak")) + object.weak = message.weak; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { + object[".google.api.fieldBehavior"] = []; + for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) + object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; + } + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); + return object; + }; + + /** + * Converts this FieldOptions to JSON. + * @function toJSON + * @memberof google.protobuf.FieldOptions + * @instance + * @returns {Object.} JSON object + */ + FieldOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * CType enum. 
+ * @name google.protobuf.FieldOptions.CType + * @enum {string} + * @property {number} STRING=0 STRING value + * @property {number} CORD=1 CORD value + * @property {number} STRING_PIECE=2 STRING_PIECE value + */ + FieldOptions.CType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STRING"] = 0; + values[valuesById[1] = "CORD"] = 1; + values[valuesById[2] = "STRING_PIECE"] = 2; + return values; + })(); + + /** + * JSType enum. + * @name google.protobuf.FieldOptions.JSType + * @enum {string} + * @property {number} JS_NORMAL=0 JS_NORMAL value + * @property {number} JS_STRING=1 JS_STRING value + * @property {number} JS_NUMBER=2 JS_NUMBER value + */ + FieldOptions.JSType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "JS_NORMAL"] = 0; + values[valuesById[1] = "JS_STRING"] = 1; + values[valuesById[2] = "JS_NUMBER"] = 2; + return values; + })(); + + return FieldOptions; + })(); + + protobuf.OneofOptions = (function() { + + /** + * Properties of an OneofOptions. + * @memberof google.protobuf + * @interface IOneofOptions + * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption + */ + + /** + * Constructs a new OneofOptions. + * @memberof google.protobuf + * @classdesc Represents an OneofOptions. + * @implements IOneofOptions + * @constructor + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + */ + function OneofOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * OneofOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.OneofOptions + * @instance + */ + OneofOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new OneofOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + * @returns {google.protobuf.OneofOptions} OneofOptions instance + */ + OneofOptions.create = function create(properties) { + return new OneofOptions(properties); + }; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.OneofOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofOptions} OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.OneofOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.OneofOptions} OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an OneofOptions message. 
+ * @function verify + * @memberof google.protobuf.OneofOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + OneofOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.OneofOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.OneofOptions} OneofOptions + */ + OneofOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofOptions) + return object; + var message = new $root.google.protobuf.OneofOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.OneofOptions} message OneofOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + OneofOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this OneofOptions to JSON. + * @function toJSON + * @memberof google.protobuf.OneofOptions + * @instance + * @returns {Object.} JSON object + */ + OneofOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return OneofOptions; + })(); + + protobuf.EnumOptions = (function() { + + /** + * Properties of an EnumOptions. + * @memberof google.protobuf + * @interface IEnumOptions + * @property {boolean|null} [allowAlias] EnumOptions allowAlias + * @property {boolean|null} [deprecated] EnumOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption + */ + + /** + * Constructs a new EnumOptions. + * @memberof google.protobuf + * @classdesc Represents an EnumOptions. + * @implements IEnumOptions + * @constructor + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + */ + function EnumOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumOptions allowAlias. 
+ * @member {boolean} allowAlias + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.allowAlias = false; + + /** + * EnumOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.deprecated = false; + + /** + * EnumOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumOptions} EnumOptions instance + */ + EnumOptions.create = function create(properties) { + return new EnumOptions(properties); + }; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumOptions message. 
+ * @function verify + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + if (typeof message.allowAlias !== "boolean") + return "allowAlias: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumOptions} EnumOptions + */ + EnumOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumOptions) + return object; + var message = new $root.google.protobuf.EnumOptions(); + if (object.allowAlias != null) + message.allowAlias = Boolean(object.allowAlias); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.EnumOptions} message EnumOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.allowAlias = false; + object.deprecated = false; + } + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + object.allowAlias = message.allowAlias; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this EnumOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumOptions + * @instance + * @returns {Object.} JSON object + */ + EnumOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return EnumOptions; + })(); + + protobuf.EnumValueOptions = (function() { + + /** + * Properties of an EnumValueOptions. + * @memberof google.protobuf + * @interface IEnumValueOptions + * @property {boolean|null} [deprecated] EnumValueOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption + */ + + /** + * Constructs a new EnumValueOptions. + * @memberof google.protobuf + * @classdesc Represents an EnumValueOptions. 
+ * @implements IEnumValueOptions + * @constructor + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + */ + function EnumValueOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumValueOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.deprecated = false; + + /** + * EnumValueOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance + */ + EnumValueOptions.create = function create(properties) { + return new EnumValueOptions(properties); + }; + + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumValueOptions message. 
+ * @function verify + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumValueOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + */ + EnumValueOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueOptions) + return object; + var message = new $root.google.protobuf.EnumValueOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.EnumValueOptions} message EnumValueOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumValueOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) + object.deprecated = false; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this EnumValueOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumValueOptions + * @instance + * @returns {Object.} JSON object + */ + EnumValueOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return EnumValueOptions; + })(); + + protobuf.ServiceOptions = (function() { + + /** + * Properties of a ServiceOptions. + * @memberof google.protobuf + * @interface IServiceOptions + * @property {boolean|null} [deprecated] ServiceOptions deprecated + * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption + * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost + * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes + */ + + /** + * Constructs a new ServiceOptions. + * @memberof google.protobuf + * @classdesc Represents a ServiceOptions. 
+ * @implements IServiceOptions + * @constructor + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + */ + function ServiceOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ServiceOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.deprecated = false; + + /** + * ServiceOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * ServiceOptions .google.api.defaultHost. + * @member {string} .google.api.defaultHost + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.defaultHost"] = ""; + + /** + * ServiceOptions .google.api.oauthScopes. + * @member {string} .google.api.oauthScopes + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.oauthScopes"] = ""; + + /** + * Creates a new ServiceOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + * @returns {google.protobuf.ServiceOptions} ServiceOptions instance + */ + ServiceOptions.create = function create(properties) { + return new ServiceOptions(properties); + }; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); + return writer; + }; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.ServiceOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1049: + message[".google.api.defaultHost"] = reader.string(); + break; + case 1050: + message[".google.api.oauthScopes"] = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.ServiceOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ServiceOptions message. + * @function verify + * @memberof google.protobuf.ServiceOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ServiceOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." 
+ error; + } + } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + if (!$util.isString(message[".google.api.defaultHost"])) + return ".google.api.defaultHost: string expected"; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + if (!$util.isString(message[".google.api.oauthScopes"])) + return ".google.api.oauthScopes: string expected"; + return null; + }; + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.ServiceOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ServiceOptions} ServiceOptions + */ + ServiceOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceOptions) + return object; + var message = new $root.google.protobuf.ServiceOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.defaultHost"] != null) + message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); + if (object[".google.api.oauthScopes"] != null) + message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); + return message; + }; + + /** + * Creates a plain object from a ServiceOptions message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.ServiceOptions} message ServiceOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ServiceOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.deprecated = false; + object[".google.api.defaultHost"] = ""; + object[".google.api.oauthScopes"] = ""; + } + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + object[".google.api.defaultHost"] = message[".google.api.defaultHost"]; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; + return object; + }; + + /** + * Converts this ServiceOptions to JSON. + * @function toJSON + * @memberof google.protobuf.ServiceOptions + * @instance + * @returns {Object.} JSON object + */ + ServiceOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ServiceOptions; + })(); + + protobuf.MethodOptions = (function() { + + /** + * Properties of a MethodOptions. 
+ * @memberof google.protobuf + * @interface IMethodOptions + * @property {boolean|null} [deprecated] MethodOptions deprecated + * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel + * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption + * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http + * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature + */ + + /** + * Constructs a new MethodOptions. + * @memberof google.protobuf + * @classdesc Represents a MethodOptions. + * @implements IMethodOptions + * @constructor + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + */ + function MethodOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.methodSignature"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MethodOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.deprecated = false; + + /** + * MethodOptions idempotencyLevel. + * @member {google.protobuf.MethodOptions.IdempotencyLevel} idempotencyLevel + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.idempotencyLevel = 0; + + /** + * MethodOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * MethodOptions .google.api.http. + * @member {google.api.IHttpRule|null|undefined} .google.api.http + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype[".google.api.http"] = null; + + /** + * MethodOptions .google.api.methodSignature. 
+ * @member {Array.} .google.api.methodSignature + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype[".google.api.methodSignature"] = $util.emptyArray; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + * @returns {google.protobuf.MethodOptions} MethodOptions instance + */ + MethodOptions.create = function create(properties) { + return new MethodOptions(properties); + }; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.methodSignature"] != null && message[".google.api.methodSignature"].length) + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); + if 
(message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MethodOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MethodOptions} MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 72295728: + message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); + break; + case 1051: + if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) + message[".google.api.methodSignature"] = []; + message[".google.api.methodSignature"].push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.MethodOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MethodOptions} MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MethodOptions message. 
+ * @function verify + * @memberof google.protobuf.MethodOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MethodOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + switch (message.idempotencyLevel) { + default: + return "idempotencyLevel: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) { + var error = $root.google.api.HttpRule.verify(message[".google.api.http"]); + if (error) + return ".google.api.http." + error; + } + if (message[".google.api.methodSignature"] != null && message.hasOwnProperty(".google.api.methodSignature")) { + if (!Array.isArray(message[".google.api.methodSignature"])) + return ".google.api.methodSignature: array expected"; + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + if (!$util.isString(message[".google.api.methodSignature"][i])) + return ".google.api.methodSignature: string[] expected"; + } + return null; + }; + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MethodOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MethodOptions} MethodOptions + */ + MethodOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodOptions) + return object; + var message = new $root.google.protobuf.MethodOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + switch (object.idempotencyLevel) { + case "IDEMPOTENCY_UNKNOWN": + case 0: + message.idempotencyLevel = 0; + break; + case "NO_SIDE_EFFECTS": + case 1: + message.idempotencyLevel = 1; + break; + case "IDEMPOTENT": + case 2: + message.idempotencyLevel = 2; + break; + } + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.http"] != null) { + if (typeof object[".google.api.http"] !== "object") + throw TypeError(".google.protobuf.MethodOptions..google.api.http: object expected"); + message[".google.api.http"] = $root.google.api.HttpRule.fromObject(object[".google.api.http"]); + } + if (object[".google.api.methodSignature"]) { + if (!Array.isArray(object[".google.api.methodSignature"])) + throw TypeError(".google.protobuf.MethodOptions..google.api.methodSignature: array expected"); + message[".google.api.methodSignature"] = []; + for (var i = 0; i < object[".google.api.methodSignature"].length; ++i) + message[".google.api.methodSignature"][i] = String(object[".google.api.methodSignature"][i]); + } 
+ return message; + }; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.MethodOptions} message MethodOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MethodOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.methodSignature"] = []; + } + if (options.defaults) { + object.deprecated = false; + object.idempotencyLevel = options.enums === String ? "IDEMPOTENCY_UNKNOWN" : 0; + object[".google.api.http"] = null; + } + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + object.idempotencyLevel = options.enums === String ? 
$root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length) { + object[".google.api.methodSignature"] = []; + for (var j = 0; j < message[".google.api.methodSignature"].length; ++j) + object[".google.api.methodSignature"][j] = message[".google.api.methodSignature"][j]; + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + object[".google.api.http"] = $root.google.api.HttpRule.toObject(message[".google.api.http"], options); + return object; + }; + + /** + * Converts this MethodOptions to JSON. + * @function toJSON + * @memberof google.protobuf.MethodOptions + * @instance + * @returns {Object.} JSON object + */ + MethodOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * IdempotencyLevel enum. + * @name google.protobuf.MethodOptions.IdempotencyLevel + * @enum {string} + * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value + * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value + * @property {number} IDEMPOTENT=2 IDEMPOTENT value + */ + MethodOptions.IdempotencyLevel = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "IDEMPOTENCY_UNKNOWN"] = 0; + values[valuesById[1] = "NO_SIDE_EFFECTS"] = 1; + values[valuesById[2] = "IDEMPOTENT"] = 2; + return values; + })(); + + return MethodOptions; + })(); + + protobuf.UninterpretedOption = (function() { + + /** + * Properties of an UninterpretedOption. 
+ * @memberof google.protobuf + * @interface IUninterpretedOption + * @property {Array.|null} [name] UninterpretedOption name + * @property {string|null} [identifierValue] UninterpretedOption identifierValue + * @property {number|Long|null} [positiveIntValue] UninterpretedOption positiveIntValue + * @property {number|Long|null} [negativeIntValue] UninterpretedOption negativeIntValue + * @property {number|null} [doubleValue] UninterpretedOption doubleValue + * @property {Uint8Array|null} [stringValue] UninterpretedOption stringValue + * @property {string|null} [aggregateValue] UninterpretedOption aggregateValue + */ + + /** + * Constructs a new UninterpretedOption. + * @memberof google.protobuf + * @classdesc Represents an UninterpretedOption. + * @implements IUninterpretedOption + * @constructor + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + */ + function UninterpretedOption(properties) { + this.name = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UninterpretedOption name. + * @member {Array.} name + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.name = $util.emptyArray; + + /** + * UninterpretedOption identifierValue. + * @member {string} identifierValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.identifierValue = ""; + + /** + * UninterpretedOption positiveIntValue. + * @member {number|Long} positiveIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.positiveIntValue = $util.Long ? $util.Long.fromBits(0,0,true) : 0; + + /** + * UninterpretedOption negativeIntValue. 
+ * @member {number|Long} negativeIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.negativeIntValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * UninterpretedOption doubleValue. + * @member {number} doubleValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.doubleValue = 0; + + /** + * UninterpretedOption stringValue. + * @member {Uint8Array} stringValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.stringValue = $util.newBuffer([]); + + /** + * UninterpretedOption aggregateValue. + * @member {string} aggregateValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.aggregateValue = ""; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @function create + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption instance + */ + UninterpretedOption.create = function create(properties) { + return new UninterpretedOption(properties); + }; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UninterpretedOption.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.name.length) + for (var i = 0; i < message.name.length; ++i) + $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + writer.uint32(/* id 7, wireType 2 =*/58).bytes(message.stringValue); + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); + return writer; + }; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UninterpretedOption.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UninterpretedOption.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (!(message.name && message.name.length)) + message.name = []; + message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = reader.uint64(); + break; + case 5: + message.negativeIntValue = reader.int64(); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UninterpretedOption.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an UninterpretedOption message. 
+ * @function verify + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UninterpretedOption.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) { + if (!Array.isArray(message.name)) + return "name: array expected"; + for (var i = 0; i < message.name.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.NamePart.verify(message.name[i]); + if (error) + return "name." + error; + } + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + if (!$util.isString(message.identifierValue)) + return "identifierValue: string expected"; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (!$util.isInteger(message.positiveIntValue) && !(message.positiveIntValue && $util.isInteger(message.positiveIntValue.low) && $util.isInteger(message.positiveIntValue.high))) + return "positiveIntValue: integer|Long expected"; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (!$util.isInteger(message.negativeIntValue) && !(message.negativeIntValue && $util.isInteger(message.negativeIntValue.low) && $util.isInteger(message.negativeIntValue.high))) + return "negativeIntValue: integer|Long expected"; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + if (typeof message.doubleValue !== "number") + return "doubleValue: number expected"; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (!(message.stringValue && typeof message.stringValue.length === "number" || $util.isString(message.stringValue))) + return "stringValue: buffer expected"; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + if 
(!$util.isString(message.aggregateValue)) + return "aggregateValue: string expected"; + return null; + }; + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + */ + UninterpretedOption.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption) + return object; + var message = new $root.google.protobuf.UninterpretedOption(); + if (object.name) { + if (!Array.isArray(object.name)) + throw TypeError(".google.protobuf.UninterpretedOption.name: array expected"); + message.name = []; + for (var i = 0; i < object.name.length; ++i) { + if (typeof object.name[i] !== "object") + throw TypeError(".google.protobuf.UninterpretedOption.name: object expected"); + message.name[i] = $root.google.protobuf.UninterpretedOption.NamePart.fromObject(object.name[i]); + } + } + if (object.identifierValue != null) + message.identifierValue = String(object.identifierValue); + if (object.positiveIntValue != null) + if ($util.Long) + (message.positiveIntValue = $util.Long.fromValue(object.positiveIntValue)).unsigned = true; + else if (typeof object.positiveIntValue === "string") + message.positiveIntValue = parseInt(object.positiveIntValue, 10); + else if (typeof object.positiveIntValue === "number") + message.positiveIntValue = object.positiveIntValue; + else if (typeof object.positiveIntValue === "object") + message.positiveIntValue = new $util.LongBits(object.positiveIntValue.low >>> 0, object.positiveIntValue.high >>> 0).toNumber(true); + if (object.negativeIntValue != null) + if ($util.Long) + (message.negativeIntValue = $util.Long.fromValue(object.negativeIntValue)).unsigned = false; + else if (typeof object.negativeIntValue === "string") + message.negativeIntValue = 
parseInt(object.negativeIntValue, 10); + else if (typeof object.negativeIntValue === "number") + message.negativeIntValue = object.negativeIntValue; + else if (typeof object.negativeIntValue === "object") + message.negativeIntValue = new $util.LongBits(object.negativeIntValue.low >>> 0, object.negativeIntValue.high >>> 0).toNumber(); + if (object.doubleValue != null) + message.doubleValue = Number(object.doubleValue); + if (object.stringValue != null) + if (typeof object.stringValue === "string") + $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); + else if (object.stringValue.length) + message.stringValue = object.stringValue; + if (object.aggregateValue != null) + message.aggregateValue = String(object.aggregateValue); + return message; + }; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.UninterpretedOption} message UninterpretedOption + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UninterpretedOption.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.name = []; + if (options.defaults) { + object.identifierValue = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, true); + object.positiveIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.positiveIntValue = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.negativeIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.negativeIntValue = options.longs === String ? 
"0" : 0; + object.doubleValue = 0; + if (options.bytes === String) + object.stringValue = ""; + else { + object.stringValue = []; + if (options.bytes !== Array) + object.stringValue = $util.newBuffer(object.stringValue); + } + object.aggregateValue = ""; + } + if (message.name && message.name.length) { + object.name = []; + for (var j = 0; j < message.name.length; ++j) + object.name[j] = $root.google.protobuf.UninterpretedOption.NamePart.toObject(message.name[j], options); + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + object.identifierValue = message.identifierValue; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (typeof message.positiveIntValue === "number") + object.positiveIntValue = options.longs === String ? String(message.positiveIntValue) : message.positiveIntValue; + else + object.positiveIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.positiveIntValue) : options.longs === Number ? new $util.LongBits(message.positiveIntValue.low >>> 0, message.positiveIntValue.high >>> 0).toNumber(true) : message.positiveIntValue; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (typeof message.negativeIntValue === "number") + object.negativeIntValue = options.longs === String ? String(message.negativeIntValue) : message.negativeIntValue; + else + object.negativeIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.negativeIntValue) : options.longs === Number ? new $util.LongBits(message.negativeIntValue.low >>> 0, message.negativeIntValue.high >>> 0).toNumber() : message.negativeIntValue; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + object.doubleValue = options.json && !isFinite(message.doubleValue) ? 
String(message.doubleValue) : message.doubleValue; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + object.stringValue = options.bytes === String ? $util.base64.encode(message.stringValue, 0, message.stringValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.stringValue) : message.stringValue; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + object.aggregateValue = message.aggregateValue; + return object; + }; + + /** + * Converts this UninterpretedOption to JSON. + * @function toJSON + * @memberof google.protobuf.UninterpretedOption + * @instance + * @returns {Object.} JSON object + */ + UninterpretedOption.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + UninterpretedOption.NamePart = (function() { + + /** + * Properties of a NamePart. + * @memberof google.protobuf.UninterpretedOption + * @interface INamePart + * @property {string} namePart NamePart namePart + * @property {boolean} isExtension NamePart isExtension + */ + + /** + * Constructs a new NamePart. + * @memberof google.protobuf.UninterpretedOption + * @classdesc Represents a NamePart. + * @implements INamePart + * @constructor + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + */ + function NamePart(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * NamePart namePart. + * @member {string} namePart + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.namePart = ""; + + /** + * NamePart isExtension. 
+ * @member {boolean} isExtension + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.isExtension = false; + + /** + * Creates a new NamePart instance using the specified properties. + * @function create + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart instance + */ + NamePart.create = function create(properties) { + return new NamePart(properties); + }; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + writer.uint32(/* id 1, wireType 2 =*/10).string(message.namePart); + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.isExtension); + return writer; + }; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a NamePart message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + if (!message.hasOwnProperty("namePart")) + throw $util.ProtocolError("missing required 'namePart'", { instance: message }); + if (!message.hasOwnProperty("isExtension")) + throw $util.ProtocolError("missing required 'isExtension'", { instance: message }); + return message; + }; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a NamePart message. 
+ * @function verify
+ * @memberof google.protobuf.UninterpretedOption.NamePart
+ * @static
+ * @param {Object.<string,*>} message Plain object to verify
+ * @returns {string|null} `null` if valid, otherwise the reason why it is not
+ */
+ NamePart.verify = function verify(message) {
+ if (typeof message !== "object" || message === null)
+ return "object expected";
+ if (!$util.isString(message.namePart))
+ return "namePart: string expected";
+ if (typeof message.isExtension !== "boolean")
+ return "isExtension: boolean expected";
+ return null;
+ };
+
+ /**
+ * Creates a NamePart message from a plain object. Also converts values to their respective internal types.
+ * @function fromObject
+ * @memberof google.protobuf.UninterpretedOption.NamePart
+ * @static
+ * @param {Object.<string,*>} object Plain object
+ * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart
+ */
+ NamePart.fromObject = function fromObject(object) {
+ if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart)
+ return object;
+ var message = new $root.google.protobuf.UninterpretedOption.NamePart();
+ if (object.namePart != null)
+ message.namePart = String(object.namePart);
+ if (object.isExtension != null)
+ message.isExtension = Boolean(object.isExtension);
+ return message;
+ };
+
+ /**
+ * Creates a plain object from a NamePart message. Also converts values to other types if specified. 
+ * @function toObject
+ * @memberof google.protobuf.UninterpretedOption.NamePart
+ * @static
+ * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart
+ * @param {$protobuf.IConversionOptions} [options] Conversion options
+ * @returns {Object.<string,*>} Plain object
+ */
+ NamePart.toObject = function toObject(message, options) {
+ if (!options)
+ options = {};
+ var object = {};
+ if (options.defaults) {
+ object.namePart = "";
+ object.isExtension = false;
+ }
+ if (message.namePart != null && message.hasOwnProperty("namePart"))
+ object.namePart = message.namePart;
+ if (message.isExtension != null && message.hasOwnProperty("isExtension"))
+ object.isExtension = message.isExtension;
+ return object;
+ };
+
+ /**
+ * Converts this NamePart to JSON.
+ * @function toJSON
+ * @memberof google.protobuf.UninterpretedOption.NamePart
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ NamePart.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ return NamePart;
+ })();
+
+ return UninterpretedOption;
+ })();
+
+ protobuf.SourceCodeInfo = (function() {
+
+ /**
+ * Properties of a SourceCodeInfo.
+ * @memberof google.protobuf
+ * @interface ISourceCodeInfo
+ * @property {Array.<google.protobuf.SourceCodeInfo.ILocation>|null} [location] SourceCodeInfo location
+ */
+
+ /**
+ * Constructs a new SourceCodeInfo.
+ * @memberof google.protobuf
+ * @classdesc Represents a SourceCodeInfo.
+ * @implements ISourceCodeInfo
+ * @constructor
+ * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set
+ */
+ function SourceCodeInfo(properties) {
+ this.location = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * SourceCodeInfo location. 
+ * @member {Array.<google.protobuf.SourceCodeInfo.ILocation>} location
+ * @memberof google.protobuf.SourceCodeInfo
+ * @instance
+ */
+ SourceCodeInfo.prototype.location = $util.emptyArray;
+
+ /**
+ * Creates a new SourceCodeInfo instance using the specified properties.
+ * @function create
+ * @memberof google.protobuf.SourceCodeInfo
+ * @static
+ * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set
+ * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo instance
+ */
+ SourceCodeInfo.create = function create(properties) {
+ return new SourceCodeInfo(properties);
+ };
+
+ /**
+ * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages.
+ * @function encode
+ * @memberof google.protobuf.SourceCodeInfo
+ * @static
+ * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ SourceCodeInfo.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.location != null && message.location.length)
+ for (var i = 0; i < message.location.length; ++i)
+ $root.google.protobuf.SourceCodeInfo.Location.encode(message.location[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();
+ return writer;
+ };
+
+ /**
+ * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SourceCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SourceCodeInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.location && message.location.length)) + message.location = []; + message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SourceCodeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SourceCodeInfo message. + * @function verify + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SourceCodeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.location != null && message.hasOwnProperty("location")) { + if (!Array.isArray(message.location)) + return "location: array expected"; + for (var i = 0; i < message.location.length; ++i) { + var error = $root.google.protobuf.SourceCodeInfo.Location.verify(message.location[i]); + if (error) + return "location." + error; + } + } + return null; + }; + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + */ + SourceCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo) + return object; + var message = new $root.google.protobuf.SourceCodeInfo(); + if (object.location) { + if (!Array.isArray(object.location)) + throw TypeError(".google.protobuf.SourceCodeInfo.location: array expected"); + message.location = []; + for (var i = 0; i < object.location.length; ++i) { + if (typeof object.location[i] !== "object") + throw TypeError(".google.protobuf.SourceCodeInfo.location: object expected"); + message.location[i] = $root.google.protobuf.SourceCodeInfo.Location.fromObject(object.location[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.SourceCodeInfo} message SourceCodeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SourceCodeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.location = []; + if (message.location && message.location.length) { + object.location = []; + for (var j = 0; j < message.location.length; ++j) + object.location[j] = $root.google.protobuf.SourceCodeInfo.Location.toObject(message.location[j], options); + } + return object; + }; + + /** + * Converts this SourceCodeInfo to JSON. 
+ * @function toJSON
+ * @memberof google.protobuf.SourceCodeInfo
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ SourceCodeInfo.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ SourceCodeInfo.Location = (function() {
+
+ /**
+ * Properties of a Location.
+ * @memberof google.protobuf.SourceCodeInfo
+ * @interface ILocation
+ * @property {Array.<number>|null} [path] Location path
+ * @property {Array.<number>|null} [span] Location span
+ * @property {string|null} [leadingComments] Location leadingComments
+ * @property {string|null} [trailingComments] Location trailingComments
+ * @property {Array.<string>|null} [leadingDetachedComments] Location leadingDetachedComments
+ */
+
+ /**
+ * Constructs a new Location.
+ * @memberof google.protobuf.SourceCodeInfo
+ * @classdesc Represents a Location.
+ * @implements ILocation
+ * @constructor
+ * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set
+ */
+ function Location(properties) {
+ this.path = [];
+ this.span = [];
+ this.leadingDetachedComments = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * Location path.
+ * @member {Array.<number>} path
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @instance
+ */
+ Location.prototype.path = $util.emptyArray;
+
+ /**
+ * Location span.
+ * @member {Array.<number>} span
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @instance
+ */
+ Location.prototype.span = $util.emptyArray;
+
+ /**
+ * Location leadingComments.
+ * @member {string} leadingComments
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @instance
+ */
+ Location.prototype.leadingComments = "";
+
+ /**
+ * Location trailingComments. 
+ * @member {string} trailingComments
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @instance
+ */
+ Location.prototype.trailingComments = "";
+
+ /**
+ * Location leadingDetachedComments.
+ * @member {Array.<string>} leadingDetachedComments
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @instance
+ */
+ Location.prototype.leadingDetachedComments = $util.emptyArray;
+
+ /**
+ * Creates a new Location instance using the specified properties.
+ * @function create
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @static
+ * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set
+ * @returns {google.protobuf.SourceCodeInfo.Location} Location instance
+ */
+ Location.create = function create(properties) {
+ return new Location(properties);
+ };
+
+ /**
+ * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages.
+ * @function encode
+ * @memberof google.protobuf.SourceCodeInfo.Location
+ * @static
+ * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode
+ * @param {$protobuf.Writer} [writer] Writer to encode to
+ * @returns {$protobuf.Writer} Writer
+ */
+ Location.encode = function encode(message, writer) {
+ if (!writer)
+ writer = $Writer.create();
+ if (message.path != null && message.path.length) {
+ writer.uint32(/* id 1, wireType 2 =*/10).fork();
+ for (var i = 0; i < message.path.length; ++i)
+ writer.int32(message.path[i]);
+ writer.ldelim();
+ }
+ if (message.span != null && message.span.length) {
+ writer.uint32(/* id 2, wireType 2 =*/18).fork();
+ for (var i = 0; i < message.span.length; ++i)
+ writer.int32(message.span[i]);
+ writer.ldelim();
+ }
+ if (message.leadingComments != null && message.hasOwnProperty("leadingComments"))
+ writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments);
+ if (message.trailingComments != null && 
message.hasOwnProperty("trailingComments")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); + if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.leadingDetachedComments[i]); + return writer; + }; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Location.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Location message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.SourceCodeInfo.Location} Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Location.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); + break; + case 2: + if (!(message.span && message.span.length)) + message.span = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.span.push(reader.int32()); + } else + message.span.push(reader.int32()); + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) + message.leadingDetachedComments = []; + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.SourceCodeInfo.Location} Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Location.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Location message. 
+ * @function verify + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Location.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; + } + if (message.span != null && message.hasOwnProperty("span")) { + if (!Array.isArray(message.span)) + return "span: array expected"; + for (var i = 0; i < message.span.length; ++i) + if (!$util.isInteger(message.span[i])) + return "span: integer[] expected"; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + if (!$util.isString(message.leadingComments)) + return "leadingComments: string expected"; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + if (!$util.isString(message.trailingComments)) + return "trailingComments: string expected"; + if (message.leadingDetachedComments != null && message.hasOwnProperty("leadingDetachedComments")) { + if (!Array.isArray(message.leadingDetachedComments)) + return "leadingDetachedComments: array expected"; + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + if (!$util.isString(message.leadingDetachedComments[i])) + return "leadingDetachedComments: string[] expected"; + } + return null; + }; + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.SourceCodeInfo.Location} Location + */ + Location.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo.Location) + return object; + var message = new $root.google.protobuf.SourceCodeInfo.Location(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.span) { + if (!Array.isArray(object.span)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.span: array expected"); + message.span = []; + for (var i = 0; i < object.span.length; ++i) + message.span[i] = object.span[i] | 0; + } + if (object.leadingComments != null) + message.leadingComments = String(object.leadingComments); + if (object.trailingComments != null) + message.trailingComments = String(object.trailingComments); + if (object.leadingDetachedComments) { + if (!Array.isArray(object.leadingDetachedComments)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.leadingDetachedComments: array expected"); + message.leadingDetachedComments = []; + for (var i = 0; i < object.leadingDetachedComments.length; ++i) + message.leadingDetachedComments[i] = String(object.leadingDetachedComments[i]); + } + return message; + }; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.Location} message Location + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Location.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.path = []; + object.span = []; + object.leadingDetachedComments = []; + } + if (options.defaults) { + object.leadingComments = ""; + object.trailingComments = ""; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.span && message.span.length) { + object.span = []; + for (var j = 0; j < message.span.length; ++j) + object.span[j] = message.span[j]; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + object.leadingComments = message.leadingComments; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + object.trailingComments = message.trailingComments; + if (message.leadingDetachedComments && message.leadingDetachedComments.length) { + object.leadingDetachedComments = []; + for (var j = 0; j < message.leadingDetachedComments.length; ++j) + object.leadingDetachedComments[j] = message.leadingDetachedComments[j]; + } + return object; + }; + + /** + * Converts this Location to JSON. + * @function toJSON + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + * @returns {Object.} JSON object + */ + Location.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Location; + })(); + + return SourceCodeInfo; + })(); + + protobuf.GeneratedCodeInfo = (function() { + + /** + * Properties of a GeneratedCodeInfo. 
+ * @memberof google.protobuf
+ * @interface IGeneratedCodeInfo
+ * @property {Array.<google.protobuf.GeneratedCodeInfo.IAnnotation>|null} [annotation] GeneratedCodeInfo annotation
+ */
+
+ /**
+ * Constructs a new GeneratedCodeInfo.
+ * @memberof google.protobuf
+ * @classdesc Represents a GeneratedCodeInfo.
+ * @implements IGeneratedCodeInfo
+ * @constructor
+ * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set
+ */
+ function GeneratedCodeInfo(properties) {
+ this.annotation = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * GeneratedCodeInfo annotation.
+ * @member {Array.<google.protobuf.GeneratedCodeInfo.IAnnotation>} annotation
+ * @memberof google.protobuf.GeneratedCodeInfo
+ * @instance
+ */
+ GeneratedCodeInfo.prototype.annotation = $util.emptyArray;
+
+ /**
+ * Creates a new GeneratedCodeInfo instance using the specified properties.
+ * @function create
+ * @memberof google.protobuf.GeneratedCodeInfo
+ * @static
+ * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set
+ * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo instance
+ */
+ GeneratedCodeInfo.create = function create(properties) {
+ return new GeneratedCodeInfo(properties);
+ };
+
+ /**
+ * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GeneratedCodeInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.annotation != null && message.annotation.length) + for (var i = 0; i < message.annotation.length; ++i) + $root.google.protobuf.GeneratedCodeInfo.Annotation.encode(message.annotation[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GeneratedCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GeneratedCodeInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.annotation && message.annotation.length)) + message.annotation = []; + message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GeneratedCodeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a GeneratedCodeInfo message. + * @function verify + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GeneratedCodeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.annotation != null && message.hasOwnProperty("annotation")) { + if (!Array.isArray(message.annotation)) + return "annotation: array expected"; + for (var i = 0; i < message.annotation.length; ++i) { + var error = $root.google.protobuf.GeneratedCodeInfo.Annotation.verify(message.annotation[i]); + if (error) + return "annotation." 
+ error; + } + } + return null; + }; + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + */ + GeneratedCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo(); + if (object.annotation) { + if (!Array.isArray(object.annotation)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: array expected"); + message.annotation = []; + for (var i = 0; i < object.annotation.length; ++i) { + if (typeof object.annotation[i] !== "object") + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: object expected"); + message.annotation[i] = $root.google.protobuf.GeneratedCodeInfo.Annotation.fromObject(object.annotation[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.GeneratedCodeInfo} message GeneratedCodeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + GeneratedCodeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.annotation = []; + if (message.annotation && message.annotation.length) { + object.annotation = []; + for (var j = 0; j < message.annotation.length; ++j) + object.annotation[j] = $root.google.protobuf.GeneratedCodeInfo.Annotation.toObject(message.annotation[j], options); + } + return object; + }; + + /** + * Converts this GeneratedCodeInfo to JSON. 
+ * @function toJSON
+ * @memberof google.protobuf.GeneratedCodeInfo
+ * @instance
+ * @returns {Object.<string,*>} JSON object
+ */
+ GeneratedCodeInfo.prototype.toJSON = function toJSON() {
+ return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+ };
+
+ GeneratedCodeInfo.Annotation = (function() {
+
+ /**
+ * Properties of an Annotation.
+ * @memberof google.protobuf.GeneratedCodeInfo
+ * @interface IAnnotation
+ * @property {Array.<number>|null} [path] Annotation path
+ * @property {string|null} [sourceFile] Annotation sourceFile
+ * @property {number|null} [begin] Annotation begin
+ * @property {number|null} [end] Annotation end
+ */
+
+ /**
+ * Constructs a new Annotation.
+ * @memberof google.protobuf.GeneratedCodeInfo
+ * @classdesc Represents an Annotation.
+ * @implements IAnnotation
+ * @constructor
+ * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set
+ */
+ function Annotation(properties) {
+ this.path = [];
+ if (properties)
+ for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+ if (properties[keys[i]] != null)
+ this[keys[i]] = properties[keys[i]];
+ }
+
+ /**
+ * Annotation path.
+ * @member {Array.<number>} path
+ * @memberof google.protobuf.GeneratedCodeInfo.Annotation
+ * @instance
+ */
+ Annotation.prototype.path = $util.emptyArray;
+
+ /**
+ * Annotation sourceFile.
+ * @member {string} sourceFile
+ * @memberof google.protobuf.GeneratedCodeInfo.Annotation
+ * @instance
+ */
+ Annotation.prototype.sourceFile = "";
+
+ /**
+ * Annotation begin.
+ * @member {number} begin
+ * @memberof google.protobuf.GeneratedCodeInfo.Annotation
+ * @instance
+ */
+ Annotation.prototype.begin = 0;
+
+ /**
+ * Annotation end.
+ * @member {number} end
+ * @memberof google.protobuf.GeneratedCodeInfo.Annotation
+ * @instance
+ */
+ Annotation.prototype.end = 0;
+
+ /**
+ * Creates a new Annotation instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation instance + */ + Annotation.create = function create(properties) { + return new Annotation(properties); + }; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @function encode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.path != null && message.path.length) { + writer.uint32(/* id 1, wireType 2 =*/10).fork(); + for (var i = 0; i < message.path.length; ++i) + writer.int32(message.path[i]); + writer.ldelim(); + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); + if (message.begin != null && message.hasOwnProperty("begin")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); + if (message.end != null && message.hasOwnProperty("end")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); + return writer; + }; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Annotation message. + * @function verify + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Annotation.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + if (!$util.isString(message.sourceFile)) + return "sourceFile: string expected"; + if (message.begin != null && message.hasOwnProperty("begin")) + if (!$util.isInteger(message.begin)) + return "begin: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + */ + Annotation.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo.Annotation) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.Annotation.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.sourceFile != null) + message.sourceFile = String(object.sourceFile); + if (object.begin != null) + message.begin = object.begin | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.Annotation} message Annotation + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Annotation.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.path = []; + if (options.defaults) { + object.sourceFile = ""; + object.begin = 0; + object.end = 0; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + object.sourceFile = message.sourceFile; + if (message.begin != null && message.hasOwnProperty("begin")) + object.begin = message.begin; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this Annotation to JSON. + * @function toJSON + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + * @returns {Object.} JSON object + */ + Annotation.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Annotation; + })(); + + return GeneratedCodeInfo; + })(); + + protobuf.Timestamp = (function() { + + /** + * Properties of a Timestamp. + * @memberof google.protobuf + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos + */ + + /** + * Constructs a new Timestamp. + * @memberof google.protobuf + * @classdesc Represents a Timestamp. 
+ * @implements ITimestamp + * @constructor + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + */ + function Timestamp(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. + * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance + */ + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && message.hasOwnProperty("seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && message.hasOwnProperty("nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = reader.int64(); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. + * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? 
new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Timestamp; + })(); + + protobuf.Empty = (function() { + + /** + * Properties of an Empty. + * @memberof google.protobuf + * @interface IEmpty + */ + + /** + * Constructs a new Empty. + * @memberof google.protobuf + * @classdesc Represents an Empty. + * @implements IEmpty + * @constructor + * @param {google.protobuf.IEmpty=} [properties] Properties to set + */ + function Empty(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new Empty instance using the specified properties. + * @function create + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty=} [properties] Properties to set + * @returns {google.protobuf.Empty} Empty instance + */ + Empty.create = function create(properties) { + return new Empty(properties); + }; + + /** + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Empty.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified Empty message, length delimited. 
Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Empty.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Empty message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Empty + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Empty} Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Empty.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Empty message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Empty + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Empty} Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Empty.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Empty message. 
+ * @function verify + * @memberof google.protobuf.Empty + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Empty.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Empty + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Empty} Empty + */ + Empty.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Empty) + return object; + return new $root.google.protobuf.Empty(); + }; + + /** + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.Empty} message Empty + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Empty.toObject = function toObject() { + return {}; + }; + + /** + * Converts this Empty to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Empty + * @instance + * @returns {Object.} JSON object + */ + Empty.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Empty; + })(); + + return protobuf; + })(); + + return google; + })(); + + return $root; +}); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json new file mode 100644 index 00000000000..6f3c1689461 --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.json @@ -0,0 +1,1532 @@ +{ + "nested": { + "google": { + "nested": { + "cloud": { + "nested": { + "bigquery": { + "nested": { + "storage": { + "nested": { + "v1beta1": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", + "java_outer_classname": "TableReferenceProto", + "java_package": "com.google.cloud.bigquery.storage.v1beta1" + }, + "nested": { + "ArrowSchema": { + "fields": { + "serializedSchema": { + "type": "bytes", + "id": 1 + } + } + }, + "ArrowRecordBatch": { + "fields": { + "serializedRecordBatch": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "AvroSchema": { + "fields": { + "schema": { + "type": "string", + "id": 1 + } + } + }, + "AvroRows": { + "fields": { + "serializedBinaryRows": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "TableReadOptions": { + "fields": { + "selectedFields": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "rowRestriction": { + "type": "string", + "id": 2 + } + } + }, + "BigQueryStorage": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateReadSession": { + "requestType": 
"CreateReadSessionRequest", + "responseType": "ReadSession", + "options": { + "(google.api.http).post": "/v1beta1/{table_reference.project_id=projects/*}", + "(google.api.http).body": "*", + "(google.api.http).additional_bindings.post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", + "(google.api.http).additional_bindings.body": "*", + "(google.api.method_signature)": "table_reference,parent,requested_streams" + } + }, + "ReadRows": { + "requestType": "ReadRowsRequest", + "responseType": "ReadRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}", + "(google.api.method_signature)": "read_position" + } + }, + "BatchCreateReadSessionStreams": { + "requestType": "BatchCreateReadSessionStreamsRequest", + "responseType": "BatchCreateReadSessionStreamsResponse", + "options": { + "(google.api.http).post": "/v1beta1/{session.name=projects/*/sessions/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "session,requested_streams" + } + }, + "FinalizeStream": { + "requestType": "FinalizeStreamRequest", + "responseType": "google.protobuf.Empty", + "options": { + "(google.api.http).post": "/v1beta1/{stream.name=projects/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "stream" + } + }, + "SplitReadStream": { + "requestType": "SplitReadStreamRequest", + "responseType": "SplitReadStreamResponse", + "options": { + "(google.api.http).get": "/v1beta1/{original_stream.name=projects/*/streams/*}", + "(google.api.method_signature)": "original_stream" + } + } + } + }, + "Stream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/Stream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/streams/{stream}" + }, + "fields": { + "name": { + "type": "string", + "id": 1 + } + } + }, + "StreamPosition": { + "fields": { + "stream": { + "type": "Stream", + "id": 1 + }, + "offset": { 
+ "type": "int64", + "id": 2 + } + } + }, + "ReadSession": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" + }, + "oneofs": { + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] + } + }, + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "expireTime": { + "type": "google.protobuf.Timestamp", + "id": 2 + }, + "avroSchema": { + "type": "AvroSchema", + "id": 5 + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 6 + }, + "streams": { + "rule": "repeated", + "type": "Stream", + "id": 4 + }, + "tableReference": { + "type": "TableReference", + "id": 7 + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 8 + }, + "shardingStrategy": { + "type": "ShardingStrategy", + "id": 9 + } + } + }, + "CreateReadSessionRequest": { + "fields": { + "tableReference": { + "type": "TableReference", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "parent": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 2 + }, + "requestedStreams": { + "type": "int32", + "id": 3 + }, + "readOptions": { + "type": "TableReadOptions", + "id": 4 + }, + "format": { + "type": "DataFormat", + "id": 5 + }, + "shardingStrategy": { + "type": "ShardingStrategy", + "id": 7 + } + } + }, + "DataFormat": { + "values": { + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, + "ARROW": 3 + } + }, + "ShardingStrategy": { + "values": { + "SHARDING_STRATEGY_UNSPECIFIED": 0, + "LIQUID": 1, + "BALANCED": 2 + } + }, + "ReadRowsRequest": { + "fields": { + "readPosition": { + "type": "StreamPosition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "StreamStatus": { + "fields": { + "estimatedRowCount": { + "type": "int64", + "id": 1 + }, + "fractionConsumed": { + "type": "float", + 
"id": 2 + }, + "progress": { + "type": "Progress", + "id": 4 + }, + "isSplittable": { + "type": "bool", + "id": 3 + } + } + }, + "Progress": { + "fields": { + "atResponseStart": { + "type": "float", + "id": 1 + }, + "atResponseEnd": { + "type": "float", + "id": 2 + } + } + }, + "ThrottleStatus": { + "fields": { + "throttlePercent": { + "type": "int32", + "id": 1 + } + } + }, + "ReadRowsResponse": { + "oneofs": { + "rows": { + "oneof": [ + "avroRows", + "arrowRecordBatch" + ] + } + }, + "fields": { + "avroRows": { + "type": "AvroRows", + "id": 3 + }, + "arrowRecordBatch": { + "type": "ArrowRecordBatch", + "id": 4 + }, + "rowCount": { + "type": "int64", + "id": 6 + }, + "status": { + "type": "StreamStatus", + "id": 2 + }, + "throttleStatus": { + "type": "ThrottleStatus", + "id": 5 + } + } + }, + "BatchCreateReadSessionStreamsRequest": { + "fields": { + "session": { + "type": "ReadSession", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "requestedStreams": { + "type": "int32", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCreateReadSessionStreamsResponse": { + "fields": { + "streams": { + "rule": "repeated", + "type": "Stream", + "id": 1 + } + } + }, + "FinalizeStreamRequest": { + "fields": { + "stream": { + "type": "Stream", + "id": 2 + } + } + }, + "SplitReadStreamRequest": { + "fields": { + "originalStream": { + "type": "Stream", + "id": 1 + }, + "fraction": { + "type": "float", + "id": 2 + } + } + }, + "SplitReadStreamResponse": { + "fields": { + "primaryStream": { + "type": "Stream", + "id": 1 + }, + "remainderStream": { + "type": "Stream", + "id": 2 + } + } + }, + "TableReference": { + "fields": { + "projectId": { + "type": "string", + "id": 1 + }, + "datasetId": { + "type": "string", + "id": 2 + }, + "tableId": { + "type": "string", + "id": 3 + } + } + }, + "TableModifiers": { + "fields": { + "snapshotTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + } + } + } + } + } + 
} + } + } + } + } + }, + "api": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/api/annotations;annotations", + "java_multiple_files": true, + "java_outer_classname": "ResourceProto", + "java_package": "com.google.api", + "objc_class_prefix": "GAPI", + "cc_enable_arenas": true + }, + "nested": { + "http": { + "type": "HttpRule", + "id": 72295728, + "extend": "google.protobuf.MethodOptions" + }, + "Http": { + "fields": { + "rules": { + "rule": "repeated", + "type": "HttpRule", + "id": 1 + }, + "fullyDecodeReservedExpansion": { + "type": "bool", + "id": 2 + } + } + }, + "HttpRule": { + "oneofs": { + "pattern": { + "oneof": [ + "get", + "put", + "post", + "delete", + "patch", + "custom" + ] + } + }, + "fields": { + "selector": { + "type": "string", + "id": 1 + }, + "get": { + "type": "string", + "id": 2 + }, + "put": { + "type": "string", + "id": 3 + }, + "post": { + "type": "string", + "id": 4 + }, + "delete": { + "type": "string", + "id": 5 + }, + "patch": { + "type": "string", + "id": 6 + }, + "custom": { + "type": "CustomHttpPattern", + "id": 8 + }, + "body": { + "type": "string", + "id": 7 + }, + "responseBody": { + "type": "string", + "id": 12 + }, + "additionalBindings": { + "rule": "repeated", + "type": "HttpRule", + "id": 11 + } + } + }, + "CustomHttpPattern": { + "fields": { + "kind": { + "type": "string", + "id": 1 + }, + "path": { + "type": "string", + "id": 2 + } + } + }, + "methodSignature": { + "rule": "repeated", + "type": "string", + "id": 1051, + "extend": "google.protobuf.MethodOptions" + }, + "defaultHost": { + "type": "string", + "id": 1049, + "extend": "google.protobuf.ServiceOptions" + }, + "oauthScopes": { + "type": "string", + "id": 1050, + "extend": "google.protobuf.ServiceOptions" + }, + "fieldBehavior": { + "rule": "repeated", + "type": "google.api.FieldBehavior", + "id": 1052, + "extend": "google.protobuf.FieldOptions" + }, + "FieldBehavior": { + "values": { + "FIELD_BEHAVIOR_UNSPECIFIED": 0, + "OPTIONAL": 1, + 
"REQUIRED": 2, + "OUTPUT_ONLY": 3, + "INPUT_ONLY": 4, + "IMMUTABLE": 5 + } + }, + "resourceReference": { + "type": "google.api.ResourceReference", + "id": 1055, + "extend": "google.protobuf.FieldOptions" + }, + "resourceDefinition": { + "rule": "repeated", + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.FileOptions" + }, + "resource": { + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.MessageOptions" + }, + "ResourceDescriptor": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "pattern": { + "rule": "repeated", + "type": "string", + "id": 2 + }, + "nameField": { + "type": "string", + "id": 3 + }, + "history": { + "type": "History", + "id": 4 + }, + "plural": { + "type": "string", + "id": 5 + }, + "singular": { + "type": "string", + "id": 6 + } + }, + "nested": { + "History": { + "values": { + "HISTORY_UNSPECIFIED": 0, + "ORIGINALLY_SINGLE_PATTERN": 1, + "FUTURE_MULTI_PATTERN": 2 + } + } + } + }, + "ResourceReference": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "childType": { + "type": "string", + "id": 2 + } + } + } + } + }, + "protobuf": { + "options": { + "go_package": "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor", + "java_package": "com.google.protobuf", + "java_outer_classname": "DescriptorProtos", + "csharp_namespace": "Google.Protobuf.Reflection", + "objc_class_prefix": "GPB", + "cc_enable_arenas": true, + "optimize_for": "SPEED" + }, + "nested": { + "FileDescriptorSet": { + "fields": { + "file": { + "rule": "repeated", + "type": "FileDescriptorProto", + "id": 1 + } + } + }, + "FileDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "package": { + "type": "string", + "id": 2 + }, + "dependency": { + "rule": "repeated", + "type": "string", + "id": 3 + }, + "publicDependency": { + "rule": "repeated", + "type": "int32", + "id": 10, + "options": { + "packed": false + } + }, + "weakDependency": { + 
"rule": "repeated", + "type": "int32", + "id": 11, + "options": { + "packed": false + } + }, + "messageType": { + "rule": "repeated", + "type": "DescriptorProto", + "id": 4 + }, + "enumType": { + "rule": "repeated", + "type": "EnumDescriptorProto", + "id": 5 + }, + "service": { + "rule": "repeated", + "type": "ServiceDescriptorProto", + "id": 6 + }, + "extension": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 7 + }, + "options": { + "type": "FileOptions", + "id": 8 + }, + "sourceCodeInfo": { + "type": "SourceCodeInfo", + "id": 9 + }, + "syntax": { + "type": "string", + "id": 12 + } + } + }, + "DescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "field": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 2 + }, + "extension": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 6 + }, + "nestedType": { + "rule": "repeated", + "type": "DescriptorProto", + "id": 3 + }, + "enumType": { + "rule": "repeated", + "type": "EnumDescriptorProto", + "id": 4 + }, + "extensionRange": { + "rule": "repeated", + "type": "ExtensionRange", + "id": 5 + }, + "oneofDecl": { + "rule": "repeated", + "type": "OneofDescriptorProto", + "id": 8 + }, + "options": { + "type": "MessageOptions", + "id": 7 + }, + "reservedRange": { + "rule": "repeated", + "type": "ReservedRange", + "id": 9 + }, + "reservedName": { + "rule": "repeated", + "type": "string", + "id": 10 + } + }, + "nested": { + "ExtensionRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + }, + "options": { + "type": "ExtensionRangeOptions", + "id": 3 + } + } + }, + "ReservedRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + } + } + } + } + }, + "ExtensionRangeOptions": { + "fields": { + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + 
}, + "FieldDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "number": { + "type": "int32", + "id": 3 + }, + "label": { + "type": "Label", + "id": 4 + }, + "type": { + "type": "Type", + "id": 5 + }, + "typeName": { + "type": "string", + "id": 6 + }, + "extendee": { + "type": "string", + "id": 2 + }, + "defaultValue": { + "type": "string", + "id": 7 + }, + "oneofIndex": { + "type": "int32", + "id": 9 + }, + "jsonName": { + "type": "string", + "id": 10 + }, + "options": { + "type": "FieldOptions", + "id": 8 + } + }, + "nested": { + "Type": { + "values": { + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18 + } + }, + "Label": { + "values": { + "LABEL_OPTIONAL": 1, + "LABEL_REQUIRED": 2, + "LABEL_REPEATED": 3 + } + } + } + }, + "OneofDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "options": { + "type": "OneofOptions", + "id": 2 + } + } + }, + "EnumDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "value": { + "rule": "repeated", + "type": "EnumValueDescriptorProto", + "id": 2 + }, + "options": { + "type": "EnumOptions", + "id": 3 + }, + "reservedRange": { + "rule": "repeated", + "type": "EnumReservedRange", + "id": 4 + }, + "reservedName": { + "rule": "repeated", + "type": "string", + "id": 5 + } + }, + "nested": { + "EnumReservedRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + } + } + } + } + }, + "EnumValueDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "number": { + "type": "int32", + "id": 2 + }, + "options": { + "type": "EnumValueOptions", + "id": 3 + } + } 
+ }, + "ServiceDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "method": { + "rule": "repeated", + "type": "MethodDescriptorProto", + "id": 2 + }, + "options": { + "type": "ServiceOptions", + "id": 3 + } + } + }, + "MethodDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "inputType": { + "type": "string", + "id": 2 + }, + "outputType": { + "type": "string", + "id": 3 + }, + "options": { + "type": "MethodOptions", + "id": 4 + }, + "clientStreaming": { + "type": "bool", + "id": 5, + "options": { + "default": false + } + }, + "serverStreaming": { + "type": "bool", + "id": 6, + "options": { + "default": false + } + } + } + }, + "FileOptions": { + "fields": { + "javaPackage": { + "type": "string", + "id": 1 + }, + "javaOuterClassname": { + "type": "string", + "id": 8 + }, + "javaMultipleFiles": { + "type": "bool", + "id": 10, + "options": { + "default": false + } + }, + "javaGenerateEqualsAndHash": { + "type": "bool", + "id": 20, + "options": { + "deprecated": true + } + }, + "javaStringCheckUtf8": { + "type": "bool", + "id": 27, + "options": { + "default": false + } + }, + "optimizeFor": { + "type": "OptimizeMode", + "id": 9, + "options": { + "default": "SPEED" + } + }, + "goPackage": { + "type": "string", + "id": 11 + }, + "ccGenericServices": { + "type": "bool", + "id": 16, + "options": { + "default": false + } + }, + "javaGenericServices": { + "type": "bool", + "id": 17, + "options": { + "default": false + } + }, + "pyGenericServices": { + "type": "bool", + "id": 18, + "options": { + "default": false + } + }, + "phpGenericServices": { + "type": "bool", + "id": 42, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 23, + "options": { + "default": false + } + }, + "ccEnableArenas": { + "type": "bool", + "id": 31, + "options": { + "default": false + } + }, + "objcClassPrefix": { + "type": "string", + "id": 36 + }, + "csharpNamespace": { + "type": "string", + "id": 37 
+ }, + "swiftPrefix": { + "type": "string", + "id": 39 + }, + "phpClassPrefix": { + "type": "string", + "id": 40 + }, + "phpNamespace": { + "type": "string", + "id": 41 + }, + "phpMetadataNamespace": { + "type": "string", + "id": 44 + }, + "rubyPackage": { + "type": "string", + "id": 45 + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 38, + 38 + ] + ], + "nested": { + "OptimizeMode": { + "values": { + "SPEED": 1, + "CODE_SIZE": 2, + "LITE_RUNTIME": 3 + } + } + } + }, + "MessageOptions": { + "fields": { + "messageSetWireFormat": { + "type": "bool", + "id": 1, + "options": { + "default": false + } + }, + "noStandardDescriptorAccessor": { + "type": "bool", + "id": 2, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "mapEntry": { + "type": "bool", + "id": 7 + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 8, + 8 + ], + [ + 9, + 9 + ] + ] + }, + "FieldOptions": { + "fields": { + "ctype": { + "type": "CType", + "id": 1, + "options": { + "default": "STRING" + } + }, + "packed": { + "type": "bool", + "id": 2 + }, + "jstype": { + "type": "JSType", + "id": 6, + "options": { + "default": "JS_NORMAL" + } + }, + "lazy": { + "type": "bool", + "id": 5, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "weak": { + "type": "bool", + "id": 10, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 4, + 4 + ] + ], + "nested": { + "CType": { + "values": { + "STRING": 0, + "CORD": 1, + 
"STRING_PIECE": 2 + } + }, + "JSType": { + "values": { + "JS_NORMAL": 0, + "JS_STRING": 1, + "JS_NUMBER": 2 + } + } + } + }, + "OneofOptions": { + "fields": { + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "EnumOptions": { + "fields": { + "allowAlias": { + "type": "bool", + "id": 2 + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 5, + 5 + ] + ] + }, + "EnumValueOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 1, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "ServiceOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 33, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "MethodOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 33, + "options": { + "default": false + } + }, + "idempotencyLevel": { + "type": "IdempotencyLevel", + "id": 34, + "options": { + "default": "IDEMPOTENCY_UNKNOWN" + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "nested": { + "IdempotencyLevel": { + "values": { + "IDEMPOTENCY_UNKNOWN": 0, + "NO_SIDE_EFFECTS": 1, + "IDEMPOTENT": 2 + } + } + } + }, + "UninterpretedOption": { + "fields": { + "name": { + "rule": "repeated", + "type": "NamePart", + "id": 2 + }, + "identifierValue": { + "type": "string", + "id": 3 + }, + "positiveIntValue": { + 
"type": "uint64", + "id": 4 + }, + "negativeIntValue": { + "type": "int64", + "id": 5 + }, + "doubleValue": { + "type": "double", + "id": 6 + }, + "stringValue": { + "type": "bytes", + "id": 7 + }, + "aggregateValue": { + "type": "string", + "id": 8 + } + }, + "nested": { + "NamePart": { + "fields": { + "namePart": { + "rule": "required", + "type": "string", + "id": 1 + }, + "isExtension": { + "rule": "required", + "type": "bool", + "id": 2 + } + } + } + } + }, + "SourceCodeInfo": { + "fields": { + "location": { + "rule": "repeated", + "type": "Location", + "id": 1 + } + }, + "nested": { + "Location": { + "fields": { + "path": { + "rule": "repeated", + "type": "int32", + "id": 1 + }, + "span": { + "rule": "repeated", + "type": "int32", + "id": 2 + }, + "leadingComments": { + "type": "string", + "id": 3 + }, + "trailingComments": { + "type": "string", + "id": 4 + }, + "leadingDetachedComments": { + "rule": "repeated", + "type": "string", + "id": 6 + } + } + } + } + }, + "GeneratedCodeInfo": { + "fields": { + "annotation": { + "rule": "repeated", + "type": "Annotation", + "id": 1 + } + }, + "nested": { + "Annotation": { + "fields": { + "path": { + "rule": "repeated", + "type": "int32", + "id": 1 + }, + "sourceFile": { + "type": "string", + "id": 2 + }, + "begin": { + "type": "int32", + "id": 3 + }, + "end": { + "type": "int32", + "id": 4 + } + } + } + } + }, + "Timestamp": { + "fields": { + "seconds": { + "type": "int64", + "id": 1 + }, + "nanos": { + "type": "int32", + "id": 2 + } + } + }, + "Empty": { + "fields": {} + } + } + } + } + } + } +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts new file mode 100644 index 00000000000..544140dbe62 --- /dev/null +++ b/handwritten/bigquery-storage/src/index.ts @@ -0,0 +1,24 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v1beta1 from './v1beta1'; +const BigQueryStorageClient = v1beta1.BigQueryStorageClient; +export {v1beta1, BigQueryStorageClient}; +// For compatibility with JavaScript libraries we need to provide this default export: +// tslint:disable-next-line no-default-export +export default {v1beta1, BigQueryStorageClient}; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts new file mode 100644 index 00000000000..5f41cbd976e --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -0,0 +1,833 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as gax from 'google-gax'; +import { + APICallback, + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import * as path from 'path'; + +import * as protosTypes from '../../protos/protos'; +import * as gapicConfig from './big_query_storage_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * BigQuery storage API. + * + * The BigQuery storage API can be used to read data stored in BigQuery. + * @class + * @memberof v1beta1 + */ +export class BigQueryStorageClient { + private _descriptors: Descriptors = {page: {}, stream: {}, longrunning: {}}; + private _innerApiCalls: {[name: string]: Function}; + private _pathTemplates: {[name: string]: gax.PathTemplate}; + private _terminated = false; + auth: gax.GoogleAuth; + bigQueryStorageStub: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryStorageClient. + * + * @param {object} [options] - The configuration object. See the subsequent + * parameters for more details. + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. 
We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {function} [options.promise] - Custom promise module to use instead + * of native Promises. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + */ + + constructor(opts?: ClientOptions) { + // Ensure that options include the service address and port. + const staticMembers = this.constructor as typeof BigQueryStorageClient; + const servicePath = + opts && opts.servicePath + ? opts.servicePath + : opts && opts.apiEndpoint + ? opts.apiEndpoint + : staticMembers.servicePath; + const port = opts && opts.port ? opts.port : staticMembers.port; + + if (!opts) { + opts = {servicePath, port}; + } + opts.servicePath = opts.servicePath || servicePath; + opts.port = opts.port || port; + opts.clientConfig = opts.clientConfig || {}; + + const isBrowser = typeof window !== 'undefined'; + if (isBrowser) { + opts.fallback = true; + } + // If we are in browser, we are already using fallback because of the + // "browser" field in package.json. + // But if we were explicitly requested to use fallback, let's do it now. + const gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options + // sent to the client. + opts.scopes = (this.constructor as typeof BigQueryStorageClient).scopes; + const gaxGrpc = new gaxModule.GrpcClient(opts); + + // Save the auth object to the client, for use by other methods. + this.auth = gaxGrpc.auth as gax.GoogleAuth; + + // Determine the client header string. 
+ const clientHeader = [`gax/${gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + // For Node.js, pass the path to JSON proto file. + // For browsers, pass the JSON content. + + const nodejsProtoPath = path.join( + __dirname, + '..', + '..', + 'protos', + 'protos.json' + ); + const protos = gaxGrpc.loadProto( + opts.fallback ? require('../../protos/protos.json') : nodejsProtoPath + ); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this._pathTemplates = { + readSessionPathTemplate: new gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + streamPathTemplate: new gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this._descriptors.stream = { + readRows: new gaxModule.StreamDescriptor(gax.StreamType.SERVER_STREAMING), + }; + + // Put together the default options sent with requests. + const defaults = gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. 
+ this._innerApiCalls = {}; + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1beta1.BigQueryStorage. + this.bigQueryStorageStub = gaxGrpc.createStub( + opts.fallback + ? (protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' + ) + : // tslint:disable-next-line no-any + (protos as any).google.cloud.bigquery.storage.v1beta1.BigQueryStorage, + opts + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const bigQueryStorageStubMethods = [ + 'createReadSession', + 'readRows', + 'batchCreateReadSessionStreams', + 'finalizeStream', + 'splitReadStream', + ]; + + for (const methodName of bigQueryStorageStubMethods) { + const innerCallPromise = this.bigQueryStorageStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + return stub[methodName].apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const apiCall = gaxModule.createApiCall( + innerCallPromise, + defaults[methodName], + this._descriptors.page[methodName] || + this._descriptors.stream[methodName] || + this._descriptors.longrunning[methodName] + ); + + this._innerApiCalls[methodName] = ( + argument: {}, + callOptions?: CallOptions, + callback?: APICallback + ) => { + return apiCall(argument, callOptions, callback); + }; + } + } + + /** + * The DNS address for this API service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. 
+ */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/bigquery.readonly', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @param {function(Error, string)} callback - the callback to + * be called with the current project Id. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options: gax.CallOptions, + callback: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined, + {} | undefined + > + ): void; + /** + * Creates a new read session. A read session divides the contents of a + * BigQuery table into one or more streams, which can then be used to read + * data from the table. The read session also specifies properties of the + * data to be read, such as a list of columns or a push-down filter describing + * the rows to be returned. + * + * A particular row can be read by at most one stream. 
When the caller has + * reached the end of each stream in the session, then all the data in the + * table has been read. + * + * Read sessions automatically expire 24 hours after they are created and do + * not require manual clean-up by the caller. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.TableReference} request.tableReference + * Required. Reference to the table to read. + * @param {string} request.parent + * Required. String of the form `projects/{project_id}` indicating the + * project this ReadSession is associated with. This is the project that will + * be billed for usage. + * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} request.tableModifiers + * Any modifiers to the Table (e.g. snapshot timestamp). + * @param {number} request.requestedStreams + * Initial number of streams. If unset or 0, we will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table and + * the maximum amount of parallelism allowed by the system. + * + * Streams must be read starting from offset 0. + * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} request.readOptions + * Read options for this session (e.g. column selection, filters). + * @param {google.cloud.bigquery.storage.v1beta1.DataFormat} request.format + * Data output format. Currently default to Avro. + * @param {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} request.shardingStrategy + * The strategy to use for distributing data among multiple streams. Currently + * defaults to liquid sharding. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1beta1.ReadSession}. + * The promise has a method named "cancel" which cancels the ongoing API call. + */ + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'table_reference.project_id': request.tableReference!.projectId || '', + 'table_reference.dataset_id': request.tableReference!.datasetId || '', + }); + return this._innerApiCalls.createReadSession(request, options, callback); + } + batchCreateReadSessionStreams( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + ( + | 
protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined + ), + {} | undefined + ] + >; + batchCreateReadSessionStreams( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + options: gax.CallOptions, + callback: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined, + {} | undefined + > + ): void; + /** + * Creates additional streams for a ReadSession. This API can be used to + * dynamically adjust the parallelism of a batch processing task upwards by + * adding additional workers. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} request.session + * Required. Must be a non-expired session obtained from a call to + * CreateReadSession. Only the name field needs to be set. + * @param {number} request.requestedStreams + * Required. Number of new streams requested. Must be positive. + * Number of added streams may be less than this, see CreateReadSessionRequest + * for more information. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ + batchCreateReadSessionStreams( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'session.name': request.session!.name || '', + }); + return this._innerApiCalls.batchCreateReadSessionStreams( + request, + options, + callback + ); + } + finalizeStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.protobuf.IEmpty, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined + ), + {} | undefined + ] + >; + finalizeStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + options: gax.CallOptions, + 
callback: Callback< + protosTypes.google.protobuf.IEmpty, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined, + {} | undefined + > + ): void; + /** + * Triggers the graceful termination of a single stream in a ReadSession. This + * API can be used to dynamically adjust the parallelism of a batch processing + * task downwards without losing data. + * + * This API does not delete the stream -- it remains visible in the + * ReadSession, and any data processed by the stream is not released to other + * streams. However, no additional data will be assigned to the stream once + * this call completes. Callers must continue reading data on the stream until + * the end of the stream is reached so that data which has already been + * assigned to the stream will be processed. + * + * This method will return an error if there are no other live streams + * in the Session, or if SplitReadStream() has been called on the given + * Stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream + * Stream to finalize. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ + finalizeStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.protobuf.IEmpty, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.protobuf.IEmpty, + | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.protobuf.IEmpty, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'stream.name': request.stream!.name || '', + }); + return this._innerApiCalls.finalizeStream(request, options, callback); + } + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options: gax.CallOptions, + callback: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined, + {} | undefined + > + ): void; + 
/** + * Splits a given read stream into two Streams. These streams are referred to + * as the primary and the residual of the split. The original stream can still + * be read from in the same manner as before. Both of the returned streams can + * also be read from, and the total rows return by both child streams will be + * the same as the rows read from the original stream. + * + * Moreover, the two child streams will be allocated back to back in the + * original Stream. Concretely, it is guaranteed that for streams Original, + * Primary, and Residual, that Original[0-j] = Primary[0-j] and + * Original[j-n] = Residual[0-m] once the streams have been read to + * completion. + * + * This method is guaranteed to be idempotent. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.originalStream + * Stream to split. + * @param {number} request.fraction + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to to a data storage boundary on the server side. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'original_stream.name': request.originalStream!.name || '', + }); + return this._innerApiCalls.splitReadStream(request, options, callback); + } + + /** + * Reads rows from the table in the format prescribed by the read session. + * Each response contains one or more table rows, up to a maximum of 10 MiB + * per response; read requests which attempt to read individual rows larger + * than this will fail. + * + * Each request also returns a set of stream statistics reflecting the + * estimated total number of rows in the read stream. This number is computed + * based on the total table size and the number of active streams in the read + * session, and may change as other streams continue to read data. 
+ * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} request.readPosition + * Required. Identifier of the position in the stream to start reading from. + * The offset requested must be less than the last row read from ReadRows. + * Requesting a larger offset is undefined. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. + */ + readRows( + request?: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, + options?: gax.CallOptions + ): gax.CancellableStream { + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'read_position.stream.name': request.readPosition!.stream!.name || '', + }); + return this._innerApiCalls.readRows(request, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified readSession resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. + */ + readSessionPath(project: string, location: string, session: string) { + return this._pathTemplates.readSessionPathTemplate.render({ + project, + location, + session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified stream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} stream + * @returns {string} Resource name string. + */ + streamPath(project: string, location: string, stream: string) { + return this._pathTemplates.streamPathTemplate.render({ + project, + location, + stream, + }); + } + + /** + * Parse the project from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromStreamName(streamName: string) { + return this._pathTemplates.streamPathTemplate.match(streamName).project; + } + + /** + * Parse the location from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromStreamName(streamName: string) { + return this._pathTemplates.streamPathTemplate.match(streamName).location; + } + + /** + * Parse the stream from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromStreamName(streamName: string) { + return this._pathTemplates.streamPathTemplate.match(streamName).stream; + } + + /** + * Terminate the GRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + */ + close(): Promise { + if (!this._terminated) { + return this.bigQueryStorageStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json new file mode 100644 index 00000000000..003cb084ff8 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json @@ -0,0 +1,54 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1beta1.BigQueryStorage": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateReadSession": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ReadRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchCreateReadSessionStreams": { + "timeout_millis": 600000, + 
"retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FinalizeStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SplitReadStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json new file mode 100644 index 00000000000..0b8010758a6 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1beta1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/avro.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/read_options.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto" +] diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts new file mode 100644 index 00000000000..7346292b381 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -0,0 +1,19 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {BigQueryStorageClient} from './big_query_storage_client'; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata new file mode 100644 index 00000000000..ab38fb4bc6d --- /dev/null +++ b/handwritten/bigquery-storage/synth.metadata @@ -0,0 +1,38 @@ +{ + "updateTime": "2020-02-18T22:56:39.544261Z", + "sources": [ + { + "git": { + "name": ".", + "remote": "sso://user/steffanyb/nodejs-bigquery-storage", + "sha": "6152883ee18789d86f9d44ffc8433b55b7889118" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "ab2685d8d3a0e191dc8aef83df36773c07cb3d06", + "internalRef": "295738415" + } + }, + { + "template": { + "name": "node_library", + "origin": "synthtool.gcp", + "version": "2020.2.4" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "bigquerystorage", + "apiVersion": "v1beta1", + "language": "typescript", + "generator": "gapic-generator-typescript" + } + } + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index f9620996bce..ec0cb3d4f47 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -31,7 +31,7 @@ 'package-name': f'@google-cloud/bigquery-storage', }, ) - s.copy(library, excludes=['README.md']) + s.copy(library, excludes=['package.json', 'README.md']) # Copy common templates common_templates = gcp.CommonTemplates() templates = common_templates.node_library(source_location='build/src') diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js new file mode 100644 index 00000000000..e1637d5246a --- /dev/null +++ 
b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const storage = require('@google-cloud/bigquery-storage'); + +function main() { + const bigQueryStorageClient = new storage.BigQueryStorageClient(); +} + +main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts new file mode 100644 index 00000000000..f0ce2eb8a76 --- /dev/null +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {BigQueryStorageClient} from '@google-cloud/bigquery-storage'; + +function main() { + const bigQueryStorageClient = new BigQueryStorageClient(); +} + +main(); diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts new file mode 100644 index 00000000000..c9aa74ec221 --- /dev/null +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('typescript consumer tests', () => { + it('should have correct type signature for typescript users', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), // path to your module. 
+ sample: { + description: 'typescript based user can use the type definitions', + ts: readFileSync( + './system-test/fixtures/sample/src/index.ts' + ).toString(), + }, + }; + await packNTest(options); // will throw upon error. + }); + + it('should have correct type signature for javascript users', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), // path to your module. + sample: { + description: 'typescript based user can use the type definitions', + ts: readFileSync( + './system-test/fixtures/sample/src/index.js' + ).toString(), + }, + }; + await packNTest(options); // will throw upon error. + }); +}); diff --git a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts new file mode 100644 index 00000000000..c087a51c2d8 --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts @@ -0,0 +1,372 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protosTypes from '../protos/protos'; +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +const bigquerystorageModule = require('../src'); + +import {PassThrough} from 'stream'; + +const FAKE_STATUS_CODE = 1; +class FakeError { + name: string; + message: string; + code: number; + constructor(n: number) { + this.name = 'fakeName'; + this.message = 'fake message'; + this.code = n; + } +} +const error = new FakeError(FAKE_STATUS_CODE); +export interface Callback { + (err: FakeError | null, response?: {} | null): void; +} + +export class Operation { + constructor() {} + promise() {} +} +function mockSimpleGrpcMethod( + expectedRequest: {}, + response: {} | null, + error: FakeError | null +) { + return (actualRequest: {}, options: {}, callback: Callback) => { + assert.deepStrictEqual(actualRequest, expectedRequest); + if (error) { + callback(error); + } else if (response) { + callback(null, response); + } else { + callback(null); + } + }; +} +function mockServerStreamingGrpcMethod( + expectedRequest: {}, + response: {} | null, + error: FakeError | null +) { + return (actualRequest: {}) => { + assert.deepStrictEqual(actualRequest, expectedRequest); + const mockStream = new PassThrough({ + objectMode: true, + transform: (chunk: {}, enc: {}, callback: Callback) => { + if (error) { + callback(error); + } else { + callback(null, response); + } + }, + }); + return mockStream; + }; +} +describe('v1beta1.BigQueryStorageClient', () => { + it('has servicePath', () => { + const servicePath = + bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; + assert(servicePath); + }); + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; + assert(apiEndpoint); + }); + it('has port', () => { + const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; + assert(port); + assert(typeof port === 'number'); + }); + it('should create a client with no option', () => { + 
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + assert(client); + }); + it('should create a client with gRPC fallback', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + fallback: true, + }); + assert(client); + }); + describe('createReadSession', () => { + it('invokes createReadSession without error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.createReadSession(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + }); + + it('invokes createReadSession with error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( + request, + null, + error + ); + client.createReadSession(request, (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); 
+ done(); + }); + }); + }); + describe('batchCreateReadSessionStreams', () => { + it('invokes batchCreateReadSessionStreams without error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; + request.session = {}; + request.session.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.batchCreateReadSessionStreams = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.batchCreateReadSessionStreams(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + }); + + it('invokes batchCreateReadSessionStreams with error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; + request.session = {}; + request.session.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.batchCreateReadSessionStreams = mockSimpleGrpcMethod( + request, + null, + error + ); + client.batchCreateReadSessionStreams( + request, + (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); + done(); + } + ); + }); + }); + describe('finalizeStream', () => { + it('invokes finalizeStream without error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const 
request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; + request.stream = {}; + request.stream.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.finalizeStream = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.finalizeStream(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + }); + + it('invokes finalizeStream with error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; + request.stream = {}; + request.stream.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.finalizeStream = mockSimpleGrpcMethod( + request, + null, + error + ); + client.finalizeStream(request, (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); + done(); + }); + }); + }); + describe('splitReadStream', () => { + it('invokes splitReadStream without error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; + request.originalStream = {}; + request.originalStream.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.splitReadStream(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, 
expectedResponse); + done(); + }); + }); + + it('invokes splitReadStream with error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; + request.originalStream = {}; + request.originalStream.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( + request, + null, + error + ); + client.splitReadStream(request, (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); + done(); + }); + }); + }); + describe('readRows', () => { + it('invokes readRows without error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request = { + readPosition: {stream: ''}, + }; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( + request, + expectedResponse, + null + ); + const stream = client.readRows(request); + stream.on('data', (response: {}) => { + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + stream.on('error', (err: FakeError) => { + done(err); + }); + stream.write(); + }); + it('invokes readRows with error', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Mock request + const request = { + readPosition: {stream: ''}, + }; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( + 
request, + null, + error + ); + const stream = client.readRows(request); + stream.on('data', () => { + assert.fail(); + }); + stream.on('error', (err: FakeError) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + done(); + }); + stream.write(); + }); + }); +}); diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json new file mode 100644 index 00000000000..613d35597b5 --- /dev/null +++ b/handwritten/bigquery-storage/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2016", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/handwritten/bigquery-storage/tslint.json b/handwritten/bigquery-storage/tslint.json new file mode 100644 index 00000000000..617dc975bae --- /dev/null +++ b/handwritten/bigquery-storage/tslint.json @@ -0,0 +1,3 @@ +{ + "extends": "gts/tslint.json" +} diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js new file mode 100644 index 00000000000..ad9db41c80c --- /dev/null +++ b/handwritten/bigquery-storage/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'BigQueryStorage', + filename: './big-query-storage.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From f6d45d522bf58d93742b342999ba124d6b2a72b1 Mon Sep 17 00:00:00 2001 From: Xiaozhen Liu Date: Wed, 26 Feb 2020 16:42:07 -0800 Subject: [PATCH 004/333] feat: export protos in src/index.ts --- handwritten/bigquery-storage/src/index.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 544140dbe62..b8de7b5c834 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -22,3 +22,5 @@ export {v1beta1, BigQueryStorageClient}; // For compatibility with JavaScript libraries we need to provide this default export: // tslint:disable-next-line no-default-export export default {v1beta1, BigQueryStorageClient}; +import * as protos from '../protos/protos'; +export {protos}; From 2bb02514e0b994fee9e77a83ea7fe0d06c0cc969 Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Wed, 26 Feb 2020 21:10:45 -0800 Subject: [PATCH 005/333] build: add publish.yml enabling GitHub app for publishes (#12) --- handwritten/bigquery-storage/.github/publish.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/publish.yml diff --git a/handwritten/bigquery-storage/.github/publish.yml b/handwritten/bigquery-storage/.github/publish.yml new file mode 100644 index 00000000000..e69de29bb2d From cf9704b9e99ea1a914df7b26acdd3477f909f8cb Mon Sep 17 00:00:00 2001 From: Summer Ji Date: Thu, 27 Feb 2020 11:52:46 -0800 Subject: [PATCH 006/333] chore: update jsdoc.js (#14) --- handwritten/bigquery-storage/.jsdoc.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 719a77bf2e9..908288f55c1 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -36,11 +36,14 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2018 Google, LLC.', + copyright: 'Copyright 2019 Google, LLC.', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', - theme: 'lumen' + theme: 'lumen', + default: { + "outputSourceFiles": false + } }, markdown: { idInHeadings: true From 2330d600e96de296e61901189f59dc01faa78f7d Mon Sep 17 00:00:00 2001 From: Summer Ji Date: Thu, 27 Feb 2020 15:38:41 -0800 Subject: [PATCH 007/333] chore: update .jsdoc.js by add protos and remove double quotes (#15) --- handwritten/bigquery-storage/.jsdoc.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 908288f55c1..079bc593259 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -31,7 +31,8 @@ module.exports = { source: { excludePattern: '(^|\\/|\\\\)[._]', include: [ - 'build/src' + 
'build/src', + 'protos' ], includePattern: '\\.js$' }, @@ -42,7 +43,7 @@ module.exports = { systemName: '@google-cloud/bigquery-storage', theme: 'lumen', default: { - "outputSourceFiles": false + outputSourceFiles: false } }, markdown: { From 597afed14e6be66242bc91f6d098e1a9221d58c7 Mon Sep 17 00:00:00 2001 From: Summer Ji Date: Fri, 28 Feb 2020 16:36:05 -0800 Subject: [PATCH 008/333] chore: update jsdoc with macro license (#17) --- handwritten/bigquery-storage/.jsdoc.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 079bc593259..30eccb20f81 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -12,6 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. // +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** 'use strict'; From 776905b27cf31e32e1899a27798f67f695e56ef6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 2 Mar 2020 11:15:03 -0800 Subject: [PATCH 009/333] chore: release 1.0.0 (#6) * created CHANGELOG.md [ci skip] * updated package.json [ci skip] * updated samples/package.json [ci skip] Co-authored-by: Benjamin E. 
Coe --- handwritten/bigquery-storage/CHANGELOG.md | 13 +++++++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 handwritten/bigquery-storage/CHANGELOG.md diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md new file mode 100644 index 00000000000..7d8184e95bc --- /dev/null +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -0,0 +1,13 @@ +# Changelog + +## 1.0.0 (2020-02-29) + + +### ⚠ BREAKING CHANGES + +* initial generation of library (#1) + +### Features + +* export protos in src/index.ts ([68b922a](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/68b922a4c242a6ad2e360758ef0658ca8451b62f)) +* initial generation of library ([#1](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/1)) ([bd42fbd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/bd42fbd45616adaf36cdf197d2b0f3c811025e39)) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 90fd02d8399..52262d0a14d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "0.1.0", + "version": "1.0.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From dd8e90bef21c3d818005270c2fa751dabe0f5559 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 3 Mar 2020 09:35:38 -0800 Subject: [PATCH 010/333] chore: update linkinator config and auto-generated test (#19) * [CHANGE ME] Re-generated to pick up changes in the API or client library generator. 
* generator fix Co-authored-by: Alexander Fenster --- .../bigquery-storage/linkinator.config.json | 5 +---- handwritten/bigquery-storage/synth.metadata | 14 ++++---------- .../test/gapic-big_query_storage-v1beta1.ts | 14 ++++++++------ 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json index a4a25c7baee..b555215ca02 100644 --- a/handwritten/bigquery-storage/linkinator.config.json +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -3,9 +3,6 @@ "skip": [ "https://codecov.io/gh/googleapis/", "www.googleapis.com", - "img.shields.io", - "https://www.npmjs.org/package/@google-cloud/bigquery-storage", - "https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples", - "https://github.com/googleapis/nodejs-bigquery-storage/blob/master/samples/quickstart.js" + "img.shields.io" ] } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index ab38fb4bc6d..6d57d60c02c 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,19 +1,13 @@ { - "updateTime": "2020-02-18T22:56:39.544261Z", + "updateTime": "2020-03-03T17:23:33.988627Z", "sources": [ - { - "git": { - "name": ".", - "remote": "sso://user/steffanyb/nodejs-bigquery-storage", - "sha": "6152883ee18789d86f9d44ffc8433b55b7889118" - } - }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "ab2685d8d3a0e191dc8aef83df36773c07cb3d06", - "internalRef": "295738415" + "sha": "a78ed801b82a5c6d9c5368e24b1412212e541bb7", + "internalRef": "298607357", + "log": "a78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest 
gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n" } }, { diff --git a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts index c087a51c2d8..4b9fa589b7a 100644 --- a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts @@ -319,9 +319,10 @@ describe('v1beta1.BigQueryStorageClient', () => { projectId: 'bogus', }); // Mock request - const request = { - readPosition: {stream: ''}, - }; + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; + request.readPosition = {}; + request.readPosition.stream = {}; + request.readPosition.stream.name = ''; // Mock response const expectedResponse = {}; // Mock gRPC layer @@ -346,9 +347,10 @@ describe('v1beta1.BigQueryStorageClient', () => { projectId: 'bogus', }); // Mock request - const request = { - readPosition: {stream: ''}, - }; + const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; + request.readPosition = {}; + request.readPosition.stream = {}; + request.readPosition.stream.name = ''; // Mock response const expectedResponse = {}; // Mock gRPC layer From 254e7ca0a54a788066ce496b9b6b82ea3a7dc35b Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Thu, 5 Mar 2020 07:54:41 -0800 Subject: [PATCH 011/333] docs: updated comments for TableReadOptions --- .../cloud/bigquery/storage/v1beta1/read_options.proto | 3 +-- handwritten/bigquery-storage/synth.metadata | 8 ++++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto 
b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 9591deba7f4..8ed9b73f6cf 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -29,8 +29,7 @@ message TableReadOptions { repeated string selected_fields = 1; // Optional. SQL text filtering statement, similar to a WHERE clause in - // a query. Currently, only a single predicate that is a comparison between - // a column and a constant value is supported. Aggregates are not supported. + // a query. Aggregates are not supported. // // Examples: "int_field > 5" // "date_field = CAST('2014-9-27' as DATE)" diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 6d57d60c02c..3600eb7341d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,13 +1,13 @@ { - "updateTime": "2020-03-03T17:23:33.988627Z", + "updateTime": "2020-03-05T12:45:42.538924Z", "sources": [ { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a78ed801b82a5c6d9c5368e24b1412212e541bb7", - "internalRef": "298607357", - "log": "a78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n" + "sha": "638253bf86d1ce1c314108a089b7351440c2f0bf", + "internalRef": "298971070", + "log": "638253bf86d1ce1c314108a089b7351440c2f0bf\nfix: add 
java_multiple_files option for automl text_sentiment.proto\n\nPiperOrigin-RevId: 298971070\n\n373d655703bf914fb8b0b1cc4071d772bac0e0d1\nUpdate Recs AI Beta public bazel file\n\nPiperOrigin-RevId: 298961623\n\ndcc5d00fc8a8d8b56f16194d7c682027b2c66a3b\nfix: add java_multiple_files option for automl classification.proto\n\nPiperOrigin-RevId: 298953301\n\na3f791827266f3496a6a5201d58adc4bb265c2a3\nchore: automl/v1 publish annotations and retry config\n\nPiperOrigin-RevId: 298942178\n\n01c681586d8d6dbd60155289b587aee678530bd9\nMark return_immediately in PullRequest deprecated.\n\nPiperOrigin-RevId: 298893281\n\nc9f5e9c4bfed54bbd09227e990e7bded5f90f31c\nRemove out of date documentation for predicate support on the Storage API\n\nPiperOrigin-RevId: 298883309\n\nfd5b3b8238d783b04692a113ffe07c0363f5de0f\ngenerate webrisk v1 proto\n\nPiperOrigin-RevId: 298847934\n\n541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\n" } }, { From 4cd0ff74b9dbded73fc9c31f44f1991af3aac43a Mon Sep 17 00:00:00 2001 From: "gcf-merge-on-green[bot]" <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> Date: Fri, 6 Mar 2020 00:08:06 +0000 Subject: [PATCH 012/333] feat: deferred client initialization (#23) This PR includes changes from https://github.com/googleapis/gapic-generator-typescript/pull/317 that will move the asynchronous initialization and authentication from the client constructor to an `initialize()` method. This method will be automatically called when the first RPC call is performed. The client library usage has not changed, there is no need to update any code. 
If you want to make sure the client is authenticated _before_ the first RPC call, you can do ```js await client.initialize(); ``` manually before calling any client method. --- .../src/v1beta1/big_query_storage_client.ts | 79 +++++++++++++------ handwritten/bigquery-storage/synth.metadata | 8 +- .../test/gapic-big_query_storage-v1beta1.ts | 36 +++++++++ 3 files changed, 97 insertions(+), 26 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 5f41cbd976e..e85d0072dc2 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -43,8 +43,13 @@ export class BigQueryStorageClient { private _innerApiCalls: {[name: string]: Function}; private _pathTemplates: {[name: string]: gax.PathTemplate}; private _terminated = false; + private _opts: ClientOptions; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; auth: gax.GoogleAuth; - bigQueryStorageStub: Promise<{[name: string]: Function}>; + bigQueryStorageStub?: Promise<{[name: string]: Function}>; /** * Construct an instance of BigQueryStorageClient. @@ -68,8 +73,6 @@ export class BigQueryStorageClient { * app is running in an environment which supports * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. - * @param {function} [options.promise] - Custom promise module to use instead - * of native Promises. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. 
*/ @@ -99,25 +102,28 @@ export class BigQueryStorageClient { // If we are in browser, we are already using fallback because of the // "browser" field in package.json. // But if we were explicitly requested to use fallback, let's do it now. - const gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; + this._gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; // Create a `gaxGrpc` object, with any grpc-specific options // sent to the client. opts.scopes = (this.constructor as typeof BigQueryStorageClient).scopes; - const gaxGrpc = new gaxModule.GrpcClient(opts); + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; // Save the auth object to the client, for use by other methods. - this.auth = gaxGrpc.auth as gax.GoogleAuth; + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; // Determine the client header string. - const clientHeader = [`gax/${gaxModule.version}`, `gapic/${version}`]; + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; if (typeof process !== 'undefined' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { - clientHeader.push(`gl-web/${gaxModule.version}`); + clientHeader.push(`gl-web/${this._gaxModule.version}`); } if (!opts.fallback) { - clientHeader.push(`grpc/${gaxGrpc.grpcVersion}`); + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { clientHeader.push(`${opts.libName}/${opts.libVersion}`); @@ -133,7 +139,7 @@ export class BigQueryStorageClient { 'protos', 'protos.json' ); - const protos = gaxGrpc.loadProto( + this._protos = this._gaxGrpc.loadProto( opts.fallback ? require('../../protos/protos.json') : nodejsProtoPath ); @@ -141,10 +147,10 @@ export class BigQueryStorageClient { // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. 
this._pathTemplates = { - readSessionPathTemplate: new gaxModule.PathTemplate( + readSessionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}' ), - streamPathTemplate: new gaxModule.PathTemplate( + streamPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/streams/{stream}' ), }; @@ -152,11 +158,13 @@ export class BigQueryStorageClient { // Some of the methods on this service provide streaming responses. // Provide descriptors for these. this._descriptors.stream = { - readRows: new gaxModule.StreamDescriptor(gax.StreamType.SERVER_STREAMING), + readRows: new this._gaxModule.StreamDescriptor( + gax.StreamType.SERVER_STREAMING + ), }; // Put together the default options sent with requests. - const defaults = gaxGrpc.constructSettings( + this._defaults = this._gaxGrpc.constructSettings( 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, @@ -167,17 +175,36 @@ export class BigQueryStorageClient { // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. this._innerApiCalls = {}; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryStorageStub) { + return this.bigQueryStorageStub; + } // Put together the "service stub" for // google.cloud.bigquery.storage.v1beta1.BigQueryStorage. 
- this.bigQueryStorageStub = gaxGrpc.createStub( - opts.fallback - ? (protos as protobuf.Root).lookupService( + this.bigQueryStorageStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' ) : // tslint:disable-next-line no-any - (protos as any).google.cloud.bigquery.storage.v1beta1.BigQueryStorage, - opts + (this._protos as any).google.cloud.bigquery.storage.v1beta1 + .BigQueryStorage, + this._opts ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -203,9 +230,9 @@ export class BigQueryStorageClient { } ); - const apiCall = gaxModule.createApiCall( + const apiCall = this._gaxModule.createApiCall( innerCallPromise, - defaults[methodName], + this._defaults[methodName], this._descriptors.page[methodName] || this._descriptors.stream[methodName] || this._descriptors.longrunning[methodName] @@ -219,6 +246,8 @@ export class BigQueryStorageClient { return apiCall(argument, callOptions, callback); }; } + + return this.bigQueryStorageStub; } /** @@ -386,6 +415,7 @@ export class BigQueryStorageClient { 'table_reference.project_id': request.tableReference!.projectId || '', 'table_reference.dataset_id': request.tableReference!.datasetId || '', }); + this.initialize(); return this._innerApiCalls.createReadSession(request, options, callback); } batchCreateReadSessionStreams( @@ -473,6 +503,7 @@ export class BigQueryStorageClient { ] = gax.routingHeader.fromParams({ 'session.name': request.session!.name || '', }); + this.initialize(); return this._innerApiCalls.batchCreateReadSessionStreams( request, options, @@ -570,6 +601,7 @@ export class BigQueryStorageClient { ] = gax.routingHeader.fromParams({ 'stream.name': request.stream!.name || '', }); + this.initialize(); return this._innerApiCalls.finalizeStream(request, options, callback); } splitReadStream( @@ -670,6 +702,7 @@ export class BigQueryStorageClient { ] = 
gax.routingHeader.fromParams({ 'original_stream.name': request.originalStream!.name || '', }); + this.initialize(); return this._innerApiCalls.splitReadStream(request, options, callback); } @@ -708,6 +741,7 @@ export class BigQueryStorageClient { ] = gax.routingHeader.fromParams({ 'read_position.stream.name': request.readPosition!.stream!.name || '', }); + this.initialize(); return this._innerApiCalls.readRows(request, options); } @@ -822,8 +856,9 @@ export class BigQueryStorageClient { * The client will no longer be usable and all future behavior is undefined. */ close(): Promise { + this.initialize(); if (!this._terminated) { - return this.bigQueryStorageStub.then(stub => { + return this.bigQueryStorageStub!.then(stub => { this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 3600eb7341d..4d0b0a1a89b 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,13 +1,13 @@ { - "updateTime": "2020-03-05T12:45:42.538924Z", + "updateTime": "2020-03-05T23:02:10.150448Z", "sources": [ { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "638253bf86d1ce1c314108a089b7351440c2f0bf", - "internalRef": "298971070", - "log": "638253bf86d1ce1c314108a089b7351440c2f0bf\nfix: add java_multiple_files option for automl text_sentiment.proto\n\nPiperOrigin-RevId: 298971070\n\n373d655703bf914fb8b0b1cc4071d772bac0e0d1\nUpdate Recs AI Beta public bazel file\n\nPiperOrigin-RevId: 298961623\n\ndcc5d00fc8a8d8b56f16194d7c682027b2c66a3b\nfix: add java_multiple_files option for automl classification.proto\n\nPiperOrigin-RevId: 298953301\n\na3f791827266f3496a6a5201d58adc4bb265c2a3\nchore: automl/v1 publish annotations and retry config\n\nPiperOrigin-RevId: 298942178\n\n01c681586d8d6dbd60155289b587aee678530bd9\nMark return_immediately in PullRequest deprecated.\n\nPiperOrigin-RevId: 
298893281\n\nc9f5e9c4bfed54bbd09227e990e7bded5f90f31c\nRemove out of date documentation for predicate support on the Storage API\n\nPiperOrigin-RevId: 298883309\n\nfd5b3b8238d783b04692a113ffe07c0363f5de0f\ngenerate webrisk v1 proto\n\nPiperOrigin-RevId: 298847934\n\n541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\n" + "sha": "f0b581b5bdf803e45201ecdb3688b60e381628a8", + "internalRef": "299181282", + "log": "f0b581b5bdf803e45201ecdb3688b60e381628a8\nfix: recommendationengine/v1beta1 update some comments\n\nPiperOrigin-RevId: 299181282\n\n10e9a0a833dc85ff8f05b2c67ebe5ac785fe04ff\nbuild: add generated BUILD file for Routes Preferred API\n\nPiperOrigin-RevId: 299164808\n\n86738c956a8238d7c77f729be78b0ed887a6c913\npublish v1p1beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299152383\n\n73d9f2ad4591de45c2e1f352bc99d70cbd2a6d95\npublish v1: update with absolute address in comments\n\nPiperOrigin-RevId: 299147194\n\nd2158f24cb77b0b0ccfe68af784c6a628705e3c6\npublish v1beta2: update with absolute address in comments\n\nPiperOrigin-RevId: 299147086\n\n7fca61292c11b4cd5b352cee1a50bf88819dd63b\npublish v1p2beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146903\n\n583b7321624736e2c490e328f4b1957335779295\npublish v1p3beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146674\n\n" } }, { diff --git a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts index 4b9fa589b7a..28001512c10 100644 --- a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts +++ 
b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts @@ -105,12 +105,30 @@ describe('v1beta1.BigQueryStorageClient', () => { }); assert(client); }); + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + await client.initialize(); + assert(client.bigQueryStorageStub); + }); + it('has close method', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.close(); + }); describe('createReadSession', () => { it('invokes createReadSession without error', done => { const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; request.tableReference = {}; @@ -137,6 +155,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; request.tableReference = {}; @@ -165,6 +185,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; request.session = {}; @@ -189,6 +211,8 @@ 
describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; request.session = {}; @@ -218,6 +242,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; request.stream = {}; @@ -242,6 +268,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; request.stream = {}; @@ -268,6 +296,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; request.originalStream = {}; @@ -292,6 +322,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; request.originalStream = {}; @@ -318,6 +350,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: 
protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; request.readPosition = {}; @@ -346,6 +380,8 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); + // Initialize client before mocking + client.initialize(); // Mock request const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; request.readPosition = {}; From 024f5a4fc5cee12dfae209d5e264a5a1ffcd1a70 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Thu, 5 Mar 2020 17:27:15 -0800 Subject: [PATCH 013/333] build: update linkinator config (#25) --- handwritten/bigquery-storage/linkinator.config.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json index b555215ca02..29a223b6db6 100644 --- a/handwritten/bigquery-storage/linkinator.config.json +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -4,5 +4,7 @@ "https://codecov.io/gh/googleapis/", "www.googleapis.com", "img.shields.io" - ] + ], + "silent": true, + "concurrency": 10 } From 8c2ae8c1a2433a064acc9c96db7e3b0a9070bf94 Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Fri, 6 Mar 2020 15:00:29 -0800 Subject: [PATCH 014/333] build(tests): fix coveralls and enable build cop (#26) --- .../bigquery-storage/.kokoro/samples-test.sh | 11 ++++++++ .../bigquery-storage/.kokoro/system-test.sh | 12 ++++++++ handwritten/bigquery-storage/.kokoro/test.sh | 11 ++++++++ handwritten/bigquery-storage/.mocharc.js | 28 +++++++++++++++++++ 4 files changed, 62 insertions(+) create mode 100644 handwritten/bigquery-storage/.mocharc.js diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 20e3241c9e9..86e83c9d3da 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -39,6 +39,17 @@ if [ -f samples/package.json ]; then npm link ../ npm install cd .. + # If tests are running against master, configure Build Cop + # to open issues on failures: + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + } + trap cleanup EXIT HUP + fi npm run samples-test fi diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index fc5824e6667..dfae142a231 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -33,6 +33,18 @@ fi npm install +# If tests are running against master, configure Build Cop +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + } + trap cleanup EXIT HUP +fi + npm run system-test # codecov combines coverage across integration and unit 
tests. Include diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 9db11bb09d6..8d9c2954579 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -21,6 +21,17 @@ export NPM_CONFIG_PREFIX=/home/node/.npm-global cd $(dirname $0)/.. npm install +# If tests are running against master, configure Build Cop +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + } + trap cleanup EXIT HUP +fi npm test # codecov combines coverage across integration and unit tests. Include diff --git a/handwritten/bigquery-storage/.mocharc.js b/handwritten/bigquery-storage/.mocharc.js new file mode 100644 index 00000000000..ff7b34fa5d1 --- /dev/null +++ b/handwritten/bigquery-storage/.mocharc.js @@ -0,0 +1,28 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config From 4058474211f0546f3ee1d6f0614ad7db6a4ca763 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2020 16:12:12 +0000 Subject: [PATCH 015/333] chore: release 1.1.0 (#24) :robot: I have created a release \*beep\* \*boop\* --- ## [1.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.0.0...v1.1.0) (2020-03-06) ### Features * deferred client initialization ([#23](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/23)) ([4741719](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/474171928bbdd5d0fb2eab7be868317f88cd18eb)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 7d8184e95bc..fee320ce64d 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.0.0...v1.1.0) (2020-03-06) + + +### Features + +* deferred client initialization ([#23](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/23)) ([4741719](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/474171928bbdd5d0fb2eab7be868317f88cd18eb)) + ## 1.0.0 (2020-02-29) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 52262d0a14d..396ffa7ec1c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "1.0.0", + "version": "1.1.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 8166ae5064b771108aa17c415e02a7f00e333c4d Mon Sep 17 00:00:00 2001 From: "Benjamin E. Coe" Date: Wed, 18 Mar 2020 12:57:51 -0700 Subject: [PATCH 016/333] docs: mention templates in contributing section of README (#29) --- handwritten/bigquery-storage/README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 21c1c791ab3..1c239b6b589 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -213,6 +213,12 @@ More Information: [Google Cloud Platform Launch Stages][launch_stages] Contributions welcome! 
See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/CONTRIBUTING.md). +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its template in this +[directory](https://github.com/googleapis/synthtool/tree/master/synthtool/gcp/templates/node_library). + ## License Apache Version 2.0 From 27c0e332888e6683c68a6a364b09e72a8edd37e4 Mon Sep 17 00:00:00 2001 From: Jeff Ching Date: Thu, 19 Mar 2020 08:57:48 -0700 Subject: [PATCH 017/333] chore: remove snippet leading whitespace (#31) --- handwritten/bigquery-storage/README.md | 234 ++++++++++++------------- 1 file changed, 117 insertions(+), 117 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 1c239b6b589..ea3cd1c6216 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -56,125 +56,125 @@ npm install @google-cloud/bigquery-storage ```javascript - // The read stream contains blocks of Avro-encoded bytes. We use the - // 'avsc' library to decode these blocks. Install avsc with the following - // command: npm install avsc - const avro = require('avsc'); - - // See reference documentation at - // https://cloud.google.com/bigquery/docs/reference/storage - const bqStorage = require('@google-cloud/bigquery-storage').v1beta1 - .BigQueryStorageClient; - - const client = new bqStorage(); - - async function bigqueryStorageQuickstart() { - // Get current project ID. The read session is created in this project. - // This project can be different from that which contains the table. - const myProjectId = await client.getProjectId(); - - // This example reads baby name data from the public datasets. 
- const projectId = 'bigquery-public-data'; - const datasetId = 'usa_names'; - const tableId = 'usa_1910_current'; - - const tableReference = { - projectId, - datasetId, - tableId, - }; - - const parent = `projects/${myProjectId}`; - - /* We limit the output columns to a subset of those allowed in the table, - * and set a simple filter to only report names from the state of - * Washington (WA). - */ - const readOptions = { - selectedFields: ['name', 'number', 'state'], - rowRestriction: 'state = "WA"', - }; - - let tableModifiers = null; - const snapshotSeconds = 0; - - // Set a snapshot time if it's been specified. - if (snapshotSeconds > 0) { - tableModifiers = {snapshotTime: {seconds: snapshotSeconds}}; - } - - // API request. - const request = { - tableReference, - parent, - readOptions, - tableModifiers, - // This API can also deliver data serialized in Apache Arrow format. - // This example leverages Apache Avro. - format: 'AVRO', - /* We use a LIQUID strategy in this example because we only read from a - * single stream. Consider BALANCED if you're consuming multiple streams - * concurrently and want more consistent stream sizes. - */ - shardingStrategy: 'LIQUID', - }; - - const [session] = await client.createReadSession(request); - - const schema = JSON.parse(session.avroSchema.schema); - - const avroType = avro.Type.forSchema(schema); - - /* The offset requested must be less than the last - * row read from ReadRows. Requesting a larger offset is - * undefined. - */ - let offset = 0; - - const readRowsRequest = { - // Optional stream name or offset. Offset requested must be less than the last - // row read from readRows(). Requesting a larger offset is undefined. - readPosition: { - stream: session.streams[0], - offset, - }, - }; - - const names = new Set(); - const states = {}; - - /* We'll use only a single stream for reading data from the table. Because - * of dynamic sharding, this will yield all the rows in the table. 
However, - * if you wanted to fan out multiple readers you could do so by having a - * reader process each individual stream. +// The read stream contains blocks of Avro-encoded bytes. We use the +// 'avsc' library to decode these blocks. Install avsc with the following +// command: npm install avsc +const avro = require('avsc'); + +// See reference documentation at +// https://cloud.google.com/bigquery/docs/reference/storage +const bqStorage = require('@google-cloud/bigquery-storage').v1beta1 + .BigQueryStorageClient; + +const client = new bqStorage(); + +async function bigqueryStorageQuickstart() { + // Get current project ID. The read session is created in this project. + // This project can be different from that which contains the table. + const myProjectId = await client.getProjectId(); + + // This example reads baby name data from the public datasets. + const projectId = 'bigquery-public-data'; + const datasetId = 'usa_names'; + const tableId = 'usa_1910_current'; + + const tableReference = { + projectId, + datasetId, + tableId, + }; + + const parent = `projects/${myProjectId}`; + + /* We limit the output columns to a subset of those allowed in the table, + * and set a simple filter to only report names from the state of + * Washington (WA). + */ + const readOptions = { + selectedFields: ['name', 'number', 'state'], + rowRestriction: 'state = "WA"', + }; + + let tableModifiers = null; + const snapshotSeconds = 0; + + // Set a snapshot time if it's been specified. + if (snapshotSeconds > 0) { + tableModifiers = {snapshotTime: {seconds: snapshotSeconds}}; + } + + // API request. + const request = { + tableReference, + parent, + readOptions, + tableModifiers, + // This API can also deliver data serialized in Apache Arrow format. + // This example leverages Apache Avro. + format: 'AVRO', + /* We use a LIQUID strategy in this example because we only read from a + * single stream. 
Consider BALANCED if you're consuming multiple streams + * concurrently and want more consistent stream sizes. */ - client - .readRows(readRowsRequest) - .on('error', console.error) - .on('data', function(data) { - try { - const decodedData = avroType.decode( - data.avroRows.serializedBinaryRows - ); - - names.add(decodedData.value.name); - - if (!states[decodedData.value.state]) { - states[decodedData.value.state] = true; - } - - offset = decodedData.offset; - } catch (error) { - console.log(error); - } - }) - .on('end', function() { - console.log( - `Got ${names.size} unique names in states: ${Object.keys(states)}` + shardingStrategy: 'LIQUID', + }; + + const [session] = await client.createReadSession(request); + + const schema = JSON.parse(session.avroSchema.schema); + + const avroType = avro.Type.forSchema(schema); + + /* The offset requested must be less than the last + * row read from ReadRows. Requesting a larger offset is + * undefined. + */ + let offset = 0; + + const readRowsRequest = { + // Optional stream name or offset. Offset requested must be less than the last + // row read from readRows(). Requesting a larger offset is undefined. + readPosition: { + stream: session.streams[0], + offset, + }, + }; + + const names = new Set(); + const states = {}; + + /* We'll use only a single stream for reading data from the table. Because + * of dynamic sharding, this will yield all the rows in the table. However, + * if you wanted to fan out multiple readers you could do so by having a + * reader process each individual stream. 
+ */ + client + .readRows(readRowsRequest) + .on('error', console.error) + .on('data', function(data) { + try { + const decodedData = avroType.decode( + data.avroRows.serializedBinaryRows ); - console.log(`Last offset: ${offset}`); - }); - } + + names.add(decodedData.value.name); + + if (!states[decodedData.value.state]) { + states[decodedData.value.state] = true; + } + + offset = decodedData.offset; + } catch (error) { + console.log(error); + } + }) + .on('end', function() { + console.log( + `Got ${names.size} unique names in states: ${Object.keys(states)}` + ); + console.log(`Last offset: ${offset}`); + }); +} ``` From 9c08b581deb7b8e645e4aec2420e76f35f98ae6b Mon Sep 17 00:00:00 2001 From: "Benjamin E. Coe" Date: Mon, 23 Mar 2020 18:29:24 -0700 Subject: [PATCH 018/333] docs: document version support goals (#36) --- handwritten/bigquery-storage/README.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index ea3cd1c6216..40069706e54 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -191,6 +191,27 @@ has instructions for running the samples. +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. + +Client libraries targetting some end-of-life versions of Node.js are available, and +can be installed via npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. + +_Legacy Node.js versions are supported as a best effort:_ + +* Legacy versions will not be tested in continuous integration. +* Some security patches may not be able to be backported. +* Dependencies will not be kept up-to-date, and features will not be backported. 
+ +#### Legacy tags available + +* `legacy-8`: install client libraries from this dist-tag for versions + compatible with Node.js 8. + ## Versioning This library follows [Semantic Versioning](http://semver.org/). From 5880986ca9d781531e1d27a81f02e8804ac411f3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 25 Mar 2020 01:12:36 -0700 Subject: [PATCH 019/333] chore: regenerate the code (#35) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR was generated using Autosynth. :rainbow:
Log from Synthtool ``` 2020-03-22 05:01:05,430 synthtool > Executing /tmpfs/src/git/autosynth/working_repo/synth.py. 2020-03-22 05:01:05,486 synthtool > Ensuring dependencies. 2020-03-22 05:01:05,492 synthtool > Cloning googleapis. 2020-03-22 05:01:05,861 synthtool > Pulling Docker image: gapic-generator-typescript:latest latest: Pulling from gapic-images/gapic-generator-typescript Digest: sha256:3762b8bcba247ef4d020ffc7043e2881a20b5fab0ffd98d542f365d3f3a3829d Status: Image is up to date for gcr.io/gapic-images/gapic-generator-typescript:latest 2020-03-22 05:01:06,769 synthtool > Generating code for: google/cloud/bigquery/storage/v1beta1. 2020-03-22 05:01:07,962 synthtool > Generated code into /tmpfs/tmp/tmp1xyt53dg. .eslintignore .eslintrc.yml .github/ISSUE_TEMPLATE/bug_report.md .github/ISSUE_TEMPLATE/feature_request.md .github/ISSUE_TEMPLATE/support_request.md .github/PULL_REQUEST_TEMPLATE.md .github/publish.yml .github/release-please.yml .github/workflows/ci.yaml .kokoro/common.cfg .kokoro/continuous/node10/common.cfg .kokoro/continuous/node10/docs.cfg .kokoro/continuous/node10/lint.cfg .kokoro/continuous/node10/samples-test.cfg .kokoro/continuous/node10/system-test.cfg .kokoro/continuous/node10/test.cfg .kokoro/continuous/node12/common.cfg .kokoro/continuous/node12/test.cfg .kokoro/continuous/node8/common.cfg .kokoro/continuous/node8/test.cfg .kokoro/docs.sh .kokoro/lint.sh .kokoro/presubmit/node10/common.cfg .kokoro/presubmit/node10/docs.cfg .kokoro/presubmit/node10/lint.cfg .kokoro/presubmit/node10/samples-test.cfg .kokoro/presubmit/node10/system-test.cfg .kokoro/presubmit/node10/test.cfg .kokoro/presubmit/node12/common.cfg .kokoro/presubmit/node12/test.cfg .kokoro/presubmit/node8/common.cfg .kokoro/presubmit/node8/test.cfg .kokoro/presubmit/windows/common.cfg .kokoro/presubmit/windows/test.cfg .kokoro/publish.sh .kokoro/release/docs.cfg .kokoro/release/docs.sh .kokoro/release/publish.cfg .kokoro/samples-test.sh .kokoro/system-test.sh .kokoro/test.bat 
.kokoro/test.sh .kokoro/trampoline.sh .mocharc.js .nycrc .prettierignore .prettierrc CODE_OF_CONDUCT.md CONTRIBUTING.md LICENSE README.md codecov.yaml renovate.json samples/README.md npm WARN npm npm does not support Node.js v12.16.1 npm WARN npm You should probably upgrade to a newer version of node as we npm WARN npm can't make any promises that npm will work with this version. npm WARN npm Supported releases of Node.js are the latest release of 6, 8, 9, 10, 11. npm WARN npm You can find the latest version at https://nodejs.org/ > protobufjs@6.8.9 postinstall /tmpfs/src/git/autosynth/working_repo/node_modules/protobufjs > node scripts/postinstall > @google-cloud/bigquery-storage@1.1.0 prepare /tmpfs/src/git/autosynth/working_repo > npm run compile-protos && npm run compile npm WARN npm npm does not support Node.js v12.16.1 npm WARN npm You should probably upgrade to a newer version of node as we npm WARN npm can't make any promises that npm will work with this version. npm WARN npm Supported releases of Node.js are the latest release of 6, 8, 9, 10, 11. npm WARN npm You can find the latest version at https://nodejs.org/ > @google-cloud/bigquery-storage@1.1.0 compile-protos /tmpfs/src/git/autosynth/working_repo > compileProtos src installing semver@^5.5.0 installing uglify-js@^3.3.25 installing espree@^3.5.4 installing escodegen@^1.9.1 npm WARN npm npm does not support Node.js v12.16.1 npm WARN npm You should probably upgrade to a newer version of node as we npm WARN npm can't make any promises that npm will work with this version. npm WARN npm Supported releases of Node.js are the latest release of 6, 8, 9, 10, 11. npm WARN npm You can find the latest version at https://nodejs.org/ > @google-cloud/bigquery-storage@1.1.0 compile /tmpfs/src/git/autosynth/working_repo > tsc -p . && cp -r protos build/ npm notice created a lockfile as package-lock.json. You should commit this file. 
npm WARN optional SKIPPING OPTIONAL DEPENDENCY: fsevents@~2.1.1 (node_modules/chokidar/node_modules/fsevents): npm WARN notsup SKIPPING OPTIONAL DEPENDENCY: Unsupported platform for fsevents@2.1.2: wanted {"os":"darwin","arch":"any"} (current: {"os":"linux","arch":"x64"}) npm WARN optional SKIPPING OPTIONAL DEPENDENCY: fsevents@^1.2.7 (node_modules/watchpack/node_modules/chokidar/node_modules/fsevents): npm WARN notsup SKIPPING OPTIONAL DEPENDENCY: Unsupported platform for fsevents@1.2.12: wanted {"os":"darwin","arch":"any"} (current: {"os":"linux","arch":"x64"}) npm WARN optional SKIPPING OPTIONAL DEPENDENCY: abbrev@1.1.1 (node_modules/watchpack/node_modules/fsevents/node_modules/abbrev): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/abbrev' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.abbrev.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: ansi-regex@2.1.1 (node_modules/watchpack/node_modules/fsevents/node_modules/ansi-regex): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/ansi-regex' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.ansi-regex.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: aproba@1.2.0 (node_modules/watchpack/node_modules/fsevents/node_modules/aproba): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/aproba' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.aproba.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: balanced-match@1.0.0 
(node_modules/watchpack/node_modules/fsevents/node_modules/balanced-match): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/balanced-match' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.balanced-match.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: chownr@1.1.4 (node_modules/watchpack/node_modules/fsevents/node_modules/chownr): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/chownr' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.chownr.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: code-point-at@1.1.0 (node_modules/watchpack/node_modules/fsevents/node_modules/code-point-at): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/code-point-at' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.code-point-at.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: concat-map@0.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/concat-map): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/concat-map' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.concat-map.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: console-control-strings@1.1.0 (node_modules/watchpack/node_modules/fsevents/node_modules/console-control-strings): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or 
directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/console-control-strings' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.console-control-strings.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: core-util-is@1.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/core-util-is): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/core-util-is' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.core-util-is.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: deep-extend@0.6.0 (node_modules/watchpack/node_modules/fsevents/node_modules/deep-extend): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/deep-extend' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.deep-extend.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: delegates@1.0.0 (node_modules/watchpack/node_modules/fsevents/node_modules/delegates): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/delegates' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.delegates.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: detect-libc@1.0.3 (node_modules/watchpack/node_modules/fsevents/node_modules/detect-libc): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/detect-libc' -> 
'/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.detect-libc.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: fs.realpath@1.0.0 (node_modules/watchpack/node_modules/fsevents/node_modules/fs.realpath): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/fs.realpath' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.fs.realpath.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: has-unicode@2.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/has-unicode): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/has-unicode' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.has-unicode.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: inherits@2.0.4 (node_modules/watchpack/node_modules/fsevents/node_modules/inherits): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/inherits' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.inherits.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: ini@1.3.5 (node_modules/watchpack/node_modules/fsevents/node_modules/ini): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/ini' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.ini.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: isarray@1.0.0 
(node_modules/watchpack/node_modules/fsevents/node_modules/isarray): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/isarray' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.isarray.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: minimist@1.2.5 (node_modules/watchpack/node_modules/fsevents/node_modules/minimist): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/minimist' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.minimist.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: ms@2.1.2 (node_modules/watchpack/node_modules/fsevents/node_modules/ms): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/ms' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.ms.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: npm-normalize-package-bin@1.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/npm-normalize-package-bin): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/npm-normalize-package-bin' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.npm-normalize-package-bin.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: number-is-nan@1.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/number-is-nan): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename 
'/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/number-is-nan' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.number-is-nan.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: object-assign@4.1.1 (node_modules/watchpack/node_modules/fsevents/node_modules/object-assign): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/object-assign' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.object-assign.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: os-homedir@1.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/os-homedir): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/os-homedir' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.os-homedir.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: os-tmpdir@1.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/os-tmpdir): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/os-tmpdir' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.os-tmpdir.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: path-is-absolute@1.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/path-is-absolute): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/path-is-absolute' -> 
'/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.path-is-absolute.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: process-nextick-args@2.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/process-nextick-args): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/process-nextick-args' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.process-nextick-args.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: safe-buffer@5.1.2 (node_modules/watchpack/node_modules/fsevents/node_modules/safe-buffer): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/safe-buffer' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.safe-buffer.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: safer-buffer@2.1.2 (node_modules/watchpack/node_modules/fsevents/node_modules/safer-buffer): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/safer-buffer' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.safer-buffer.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: sax@1.2.4 (node_modules/watchpack/node_modules/fsevents/node_modules/sax): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/sax' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.sax.DELETE' npm WARN optional SKIPPING OPTIONAL 
DEPENDENCY: semver@5.7.1 (node_modules/watchpack/node_modules/fsevents/node_modules/semver): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/semver' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.semver.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: set-blocking@2.0.0 (node_modules/watchpack/node_modules/fsevents/node_modules/set-blocking): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/set-blocking' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.set-blocking.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: signal-exit@3.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/signal-exit): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/signal-exit' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.signal-exit.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: strip-json-comments@2.0.1 (node_modules/watchpack/node_modules/fsevents/node_modules/strip-json-comments): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/strip-json-comments' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.strip-json-comments.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: util-deprecate@1.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/util-deprecate): npm WARN enoent SKIPPING OPTIONAL 
DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/util-deprecate' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.util-deprecate.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: wrappy@1.0.2 (node_modules/watchpack/node_modules/fsevents/node_modules/wrappy): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/wrappy' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.wrappy.DELETE' npm WARN optional SKIPPING OPTIONAL DEPENDENCY: yallist@3.1.1 (node_modules/watchpack/node_modules/fsevents/node_modules/yallist): npm WARN enoent SKIPPING OPTIONAL DEPENDENCY: ENOENT: no such file or directory, rename '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/yallist' -> '/tmpfs/src/git/autosynth/working_repo/node_modules/watchpack/node_modules/fsevents/node_modules/.yallist.DELETE' added 966 packages from 454 contributors and audited 6746 packages in 28.411s found 0 vulnerabilities npm WARN npm npm does not support Node.js v12.16.1 npm WARN npm You should probably upgrade to a newer version of node as we npm WARN npm can't make any promises that npm will work with this version. npm WARN npm Supported releases of Node.js are the latest release of 6, 8, 9, 10, 11. 
npm WARN npm You can find the latest version at https://nodejs.org/ > @google-cloud/bigquery-storage@1.1.0 fix /tmpfs/src/git/autosynth/working_repo > gts fix && eslint samples --fix /tmpfs/src/git/autosynth/working_repo/samples/quickstart.js 27:24 error "avsc" is not found node/no-missing-require 31:29 error "@google-cloud/bigquery-storage" is not found node/no-missing-require /tmpfs/src/git/autosynth/working_repo/samples/test/quickstart.js 21:26 error "chai" is not found node/no-missing-require ✖ 3 problems (3 errors, 0 warnings) npm ERR! code ELIFECYCLE npm ERR! errno 1 npm ERR! @google-cloud/bigquery-storage@1.1.0 fix: `gts fix && eslint samples --fix` npm ERR! Exit status 1 npm ERR! npm ERR! Failed at the @google-cloud/bigquery-storage@1.1.0 fix script. npm ERR! This is probably not a problem with npm. There is likely additional logging output above. npm ERR! A complete log of this run can be found in: npm ERR! /home/kbuilder/.npm/_logs/2020-03-22T12_01_43_693Z-debug.log 2020-03-22 05:01:46,516 synthtool > Wrote metadata to synth.metadata. ```
--- handwritten/bigquery-storage/.jsdoc.js | 2 +- handwritten/bigquery-storage/src/index.ts | 2 +- .../src/v1beta1/big_query_storage_client.ts | 12 +++++++++--- .../bigquery-storage/src/v1beta1/index.ts | 2 +- handwritten/bigquery-storage/synth.metadata | 16 ++++++++-------- .../system-test/fixtures/sample/src/index.js | 2 +- .../system-test/fixtures/sample/src/index.ts | 2 +- .../bigquery-storage/system-test/install.ts | 2 +- .../test/gapic-big_query_storage-v1beta1.ts | 2 +- handwritten/bigquery-storage/webpack.config.js | 2 +- 10 files changed, 25 insertions(+), 19 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 30eccb20f81..e8a4b174718 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index b8de7b5c834..8c0e36d5f0a 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index e85d0072dc2..febe76cd7fa 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -39,7 +39,12 @@ const version = require('../../../package.json').version; * @memberof v1beta1 */ export class BigQueryStorageClient { - private _descriptors: Descriptors = {page: {}, stream: {}, longrunning: {}}; + private _descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; private _innerApiCalls: {[name: string]: Function}; private _pathTemplates: {[name: string]: gax.PathTemplate}; private _terminated = false; @@ -223,7 +228,8 @@ export class BigQueryStorageClient { if (this._terminated) { return Promise.reject('The client has already been closed.'); } - return stub[methodName].apply(stub, args); + const func = stub[methodName]; + return func.apply(stub, args); }, (err: Error | null | undefined) => () => { throw err; diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index 7346292b381..9b3c5a12d86 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 4d0b0a1a89b..d99e302fbd4 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,20 +1,20 @@ { - "updateTime": "2020-03-05T23:02:10.150448Z", + "updateTime": "2020-03-22T12:01:46.515920Z", "sources": [ { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f0b581b5bdf803e45201ecdb3688b60e381628a8", - "internalRef": "299181282", - "log": "f0b581b5bdf803e45201ecdb3688b60e381628a8\nfix: recommendationengine/v1beta1 update some comments\n\nPiperOrigin-RevId: 299181282\n\n10e9a0a833dc85ff8f05b2c67ebe5ac785fe04ff\nbuild: add generated BUILD file for Routes Preferred API\n\nPiperOrigin-RevId: 299164808\n\n86738c956a8238d7c77f729be78b0ed887a6c913\npublish v1p1beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299152383\n\n73d9f2ad4591de45c2e1f352bc99d70cbd2a6d95\npublish v1: update with absolute address in comments\n\nPiperOrigin-RevId: 299147194\n\nd2158f24cb77b0b0ccfe68af784c6a628705e3c6\npublish v1beta2: update with absolute address in comments\n\nPiperOrigin-RevId: 299147086\n\n7fca61292c11b4cd5b352cee1a50bf88819dd63b\npublish v1p2beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146903\n\n583b7321624736e2c490e328f4b1957335779295\npublish v1p3beta1: update with absolute address in comments\n\nPiperOrigin-RevId: 299146674\n\n" + "sha": "0be7105dc52590fa9a24e784052298ae37ce53aa", + "internalRef": "302154871", + "log": "0be7105dc52590fa9a24e784052298ae37ce53aa\nAdd BUILD.bazel file to asset/v1p1beta1\n\nPiperOrigin-RevId: 302154871\n\n6c248fd13e8543f8d22cbf118d978301a9fbe2a8\nAdd missing resource annotations and additional_bindings to dialogflow v2 API.\n\nPiperOrigin-RevId: 302063117\n\n9a3a7f33be9eeacf7b3e98435816b7022d206bd7\nChange the service name from \"chromeos-moblab.googleapis.com\" to 
\"chromeosmoblab.googleapis.com\"\n\nPiperOrigin-RevId: 302060989\n\n98a339237577e3de26cb4921f75fb5c57cc7a19f\nfeat: devtools/build/v1 publish client library config annotations\n\n* add details field to some of the BuildEvents\n* add final_invocation_id and build_tool_exit_code fields to BuildStatus\n\nPiperOrigin-RevId: 302044087\n\ncfabc98c6bbbb22d1aeaf7612179c0be193b3a13\nfeat: home/graph/v1 publish client library config annotations & comment updates\n\nThis change includes adding the client library configuration annotations, updated proto comments, and some client library configuration files.\n\nPiperOrigin-RevId: 302042647\n\nc8c8c0bd15d082db9546253dbaad1087c7a9782c\nchore: use latest gapic-generator in bazel WORKSPACE.\nincluding the following commits from gapic-generator:\n- feat: take source protos in all sub-packages (#3144)\n\nPiperOrigin-RevId: 301843591\n\ne4daf5202ea31cb2cb6916fdbfa9d6bd771aeb4c\nAdd bazel file for v1 client lib generation\n\nPiperOrigin-RevId: 301802926\n\n275fbcce2c900278d487c33293a3c7e1fbcd3a34\nfeat: pubsub/v1 add an experimental filter field to Subscription\n\nPiperOrigin-RevId: 301661567\n\nf2b18cec51d27c999ad30011dba17f3965677e9c\nFix: UpdateBackupRequest.backup is a resource, not a resource reference - remove annotation.\n\nPiperOrigin-RevId: 301636171\n\n800384063ac93a0cac3a510d41726fa4b2cd4a83\nCloud Billing Budget API v1beta1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301634389\n\n0cc6c146b660db21f04056c3d58a4b752ee445e3\nCloud Billing Budget API v1alpha1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301630018\n\nff2ea00f69065585c3ac0993c8b582af3b6fc215\nFix: Add resource definition for a parent of InspectTemplate which was otherwise missing.\n\nPiperOrigin-RevId: 301623052\n\n55fa441c9daf03173910760191646399338f2b7c\nAdd proto definition for AccessLevel, AccessPolicy, and ServicePerimeter.\n\nPiperOrigin-RevId: 
301620844\n\ne7b10591c5408a67cf14ffafa267556f3290e262\nCloud Bigtable Managed Backup service and message proto files.\n\nPiperOrigin-RevId: 301585144\n\nd8e226f702f8ddf92915128c9f4693b63fb8685d\nfeat: Add time-to-live in a queue for builds\n\nPiperOrigin-RevId: 301579876\n\n430375af011f8c7a5174884f0d0e539c6ffa7675\ndocs: add missing closing backtick\n\nPiperOrigin-RevId: 301538851\n\n0e9f1f60ded9ad1c2e725e37719112f5b487ab65\nbazel: Use latest release of gax_java\n\nPiperOrigin-RevId: 301480457\n\n5058c1c96d0ece7f5301a154cf5a07b2ad03a571\nUpdate GAPIC v2 with batching parameters for Logging API\n\nPiperOrigin-RevId: 301443847\n\n64ab9744073de81fec1b3a6a931befc8a90edf90\nFix: Introduce location-based organization/folder/billing-account resources\nChore: Update copyright years\n\nPiperOrigin-RevId: 301373760\n\n23d5f09e670ebb0c1b36214acf78704e2ecfc2ac\nUpdate field_behavior annotations in V1 and V2.\n\nPiperOrigin-RevId: 301337970\n\nb2cf37e7fd62383a811aa4d54d013ecae638851d\nData Catalog V1 API\n\nPiperOrigin-RevId: 301282503\n\n1976b9981e2900c8172b7d34b4220bdb18c5db42\nCloud DLP api update. Adds missing fields to Finding and adds support for hybrid jobs.\n\nPiperOrigin-RevId: 301205325\n\nae78682c05e864d71223ce22532219813b0245ac\nfix: several sample code blocks in comments are now properly indented for markdown\n\nPiperOrigin-RevId: 301185150\n\ndcd171d04bda5b67db13049320f97eca3ace3731\nPublish Media Translation API V1Beta1\n\nPiperOrigin-RevId: 301180096\n\nff1713453b0fbc5a7544a1ef6828c26ad21a370e\nAdd protos and BUILD rules for v1 API.\n\nPiperOrigin-RevId: 301179394\n\n8386761d09819b665b6a6e1e6d6ff884bc8ff781\nfeat: chromeos/modlab publish protos and config for Chrome OS Moblab API.\n\nPiperOrigin-RevId: 300843960\n\nb2e2bc62fab90e6829e62d3d189906d9b79899e4\nUpdates to GCS gRPC API spec:\n\n1. Changed GetIamPolicy and TestBucketIamPermissions to use wrapper messages around google.iam.v1 IAM requests messages, and added CommonRequestParams. 
This lets us support RequesterPays buckets.\n2. Added a metadata field to GetObjectMediaResponse, to support resuming an object media read safely (by extracting the generation of the object being read, and using it in the resumed read request).\n\nPiperOrigin-RevId: 300817706\n\n7fd916ce12335cc9e784bb9452a8602d00b2516c\nAdd deprecated_collections field for backward-compatiblity in PHP and monolith-generated Python and Ruby clients.\n\nGenerate TopicName class in Java which covers the functionality of both ProjectTopicName and DeletedTopicName. Introduce breaking changes to be fixed by synth.py.\n\nDelete default retry parameters.\n\nRetry codes defs can be deleted once # https://github.com/googleapis/gapic-generator/issues/3137 is fixed.\n\nPiperOrigin-RevId: 300813135\n\n047d3a8ac7f75383855df0166144f891d7af08d9\nfix!: google/rpc refactor ErrorInfo.type to ErrorInfo.reason and comment updates.\n\nPiperOrigin-RevId: 300773211\n\nfae4bb6d5aac52aabe5f0bb4396466c2304ea6f6\nAdding RetryPolicy to pubsub.proto\n\nPiperOrigin-RevId: 300769420\n\n7d569be2928dbd72b4e261bf9e468f23afd2b950\nAdding additional protocol buffer annotations to v3.\n\nPiperOrigin-RevId: 300718800\n\n13942d1a85a337515040a03c5108993087dc0e4f\nAdd logging protos for Recommender v1.\n\nPiperOrigin-RevId: 300689896\n\na1a573c3eecfe2c404892bfa61a32dd0c9fb22b6\nfix: change go package to use cloud.google.com/go/maps\n\nPiperOrigin-RevId: 300661825\n\nc6fbac11afa0c7ab2972d9df181493875c566f77\nfeat: publish documentai/v1beta2 protos\n\nPiperOrigin-RevId: 300656808\n\n5202a9e0d9903f49e900f20fe5c7f4e42dd6588f\nProtos for v1beta1 release of Cloud Security Center Settings API\n\nPiperOrigin-RevId: 300580858\n\n83518e18655d9d4ac044acbda063cc6ecdb63ef8\nAdds gapic.yaml file and BUILD.bazel file.\n\nPiperOrigin-RevId: 300554200\n\n836c196dc8ef8354bbfb5f30696bd3477e8db5e2\nRegenerate recommender v1beta1 gRPC ServiceConfig file for Insights methods.\n\nPiperOrigin-RevId: 
300549302\n\n34a5450c591b6be3d6566f25ac31caa5211b2f3f\nIncreases the default timeout from 20s to 30s for MetricService\n\nPiperOrigin-RevId: 300474272\n\n5d8bffe87cd01ba390c32f1714230e5a95d5991d\nfeat: use the latest gapic-generator in WORKSPACE for bazel build.\n\nPiperOrigin-RevId: 300461878\n\nd631c651e3bcfac5d371e8560c27648f7b3e2364\nUpdated the GAPIC configs to include parameters for Backups APIs.\n\nPiperOrigin-RevId: 300443402\n\n678afc7055c1adea9b7b54519f3bdb228013f918\nAdding Game Servers v1beta API.\n\nPiperOrigin-RevId: 300433218\n\n80d2bd2c652a5e213302041b0620aff423132589\nEnable proto annotation and gapic v2 for talent API.\n\nPiperOrigin-RevId: 300393997\n\n85e454be7a353f7fe1bf2b0affb753305785b872\ndocs(google/maps/roads): remove mention of nonexported api\n\nPiperOrigin-RevId: 300367734\n\nbf839ae632e0f263a729569e44be4b38b1c85f9c\nAdding protocol buffer annotations and updated config info for v1 and v2.\n\nPiperOrigin-RevId: 300276913\n\n309b899ca18a4c604bce63882a161d44854da549\nPublish `Backup` APIs and protos.\n\nPiperOrigin-RevId: 300246038\n\neced64c3f122421350b4aca68a28e89121d20db8\nadd PHP client libraries\n\nPiperOrigin-RevId: 300193634\n\n7727af0e39df1ae9ad715895c8576d7b65cf6c6d\nfeat: use the latest gapic-generator and protoc-java-resource-name-plugin in googleapis/WORKSPACE.\n\nPiperOrigin-RevId: 300188410\n\n2a25aa351dd5b5fe14895266aff5824d90ce757b\nBreaking change: remove the ProjectOrTenant resource and its references.\n\nPiperOrigin-RevId: 300182152\n\na499dbb28546379415f51803505cfb6123477e71\nUpdate web risk v1 gapic config and BUILD file.\n\nPiperOrigin-RevId: 300152177\n\n52701da10fec2a5f9796e8d12518c0fe574488fe\nFix: apply appropriate namespace/package options for C#, PHP and Ruby.\n\nPiperOrigin-RevId: 300123508\n\n365c029b8cdb63f7751b92ab490f1976e616105c\nAdd CC targets to the kms protos.\n\nThese are needed by go/tink.\n\nPiperOrigin-RevId: 300038469\n\n4ba9aa8a4a1413b88dca5a8fa931824ee9c284e6\nExpose logo recognition API proto 
for GA.\n\nPiperOrigin-RevId: 299971671\n\n1c9fc2c9e03dadf15f16b1c4f570955bdcebe00e\nAdding ruby_package option to accessapproval.proto for the Ruby client libraries generation.\n\nPiperOrigin-RevId: 299955924\n\n1cc6f0a7bfb147e6f2ede911d9b01e7a9923b719\nbuild(google/maps/routes): generate api clients\n\nPiperOrigin-RevId: 299955905\n\n29a47c965aac79e3fe8e3314482ca0b5967680f0\nIncrease timeout to 1hr for method `dropRange` in bigtable/admin/v2, which is\nsynced with the timeout setting in gapic_yaml.\n\nPiperOrigin-RevId: 299917154\n\n8f631c4c70a60a9c7da3749511ee4ad432b62898\nbuild(google/maps/roads/v1op): move go to monorepo pattern\n\nPiperOrigin-RevId: 299885195\n\nd66816518844ebbf63504c9e8dfc7133921dd2cd\nbuild(google/maps/roads/v1op): Add bazel build files to generate clients.\n\nPiperOrigin-RevId: 299851148\n\naf7dff701fabe029672168649c62356cf1bb43d0\nAdd LogPlayerReports and LogImpressions to Playable Locations service\n\nPiperOrigin-RevId: 299724050\n\nb6927fca808f38df32a642c560082f5bf6538ced\nUpdate BigQuery Connection API v1beta1 proto: added credential to CloudSqlProperties.\n\nPiperOrigin-RevId: 299503150\n\n91e1fb5ef9829c0c7a64bfa5bde330e6ed594378\nchore: update protobuf (protoc) version to 3.11.2\n\nPiperOrigin-RevId: 299404145\n\n30e36b4bee6749c4799f4fc1a51cc8f058ba167d\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 299399890\n\nffbb493674099f265693872ae250711b2238090c\nfeat: cloudbuild/v1 add new fields and annotate OUTPUT_OUT fields.\n\nPiperOrigin-RevId: 299397780\n\nbc973a15818e00c19e121959832676e9b7607456\nbazel: Fix broken common dependency\n\nPiperOrigin-RevId: 299397431\n\n71094a343e3b962e744aa49eb9338219537474e4\nchore: bigtable/admin/v2 publish retry config\n\nPiperOrigin-RevId: 299391875\n\n8f488efd7bda33885cb674ddd023b3678c40bd82\nfeat: Migrate logging to GAPIC v2; release new features.\n\nIMPORTANT: This is a breaking change for client libraries\nin all languages.\n\nCommitter: @lukesneeringer, @jskeet\nPiperOrigin-RevId: 
299370279\n\n007605bf9ad3a1fd775014ebefbf7f1e6b31ee71\nUpdate API for bigqueryreservation v1beta1.\n- Adds flex capacity commitment plan to CapacityCommitment.\n- Adds methods for getting and updating BiReservations.\n- Adds methods for updating/splitting/merging CapacityCommitments.\n\nPiperOrigin-RevId: 299368059\n\n" } }, { - "template": { - "name": "node_library", - "origin": "synthtool.gcp", - "version": "2020.2.4" + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "7e98e1609c91082f4eeb63b530c6468aefd18cfd" } } ], diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index e1637d5246a..72dceac69f8 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index f0ce2eb8a76..c0e08eebba4 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index c9aa74ec221..c4d80e9c0c8 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts index 28001512c10..8bc4a193677 100644 --- a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js index ad9db41c80c..b4d07b13300 100644 --- a/handwritten/bigquery-storage/webpack.config.js +++ b/handwritten/bigquery-storage/webpack.config.js @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From 0496337cb63a6d26c6fe545445811ecc14b4ff58 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Tue, 31 Mar 2020 13:44:19 -0700 Subject: [PATCH 020/333] feat!: drop node8 support (#39) BREAKING CHANGE: The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM. 
--- handwritten/bigquery-storage/.eslintrc.json | 3 + handwritten/bigquery-storage/.eslintrc.yml | 15 - handwritten/bigquery-storage/.prettierrc | 8 - handwritten/bigquery-storage/.prettierrc.js | 17 + handwritten/bigquery-storage/package.json | 24 +- handwritten/bigquery-storage/src/index.ts | 2 - .../src/v1beta1/big_query_storage_client.ts | 272 +++--- handwritten/bigquery-storage/synth.metadata | 20 +- .../system-test/fixtures/sample/src/index.js | 1 - .../test/gapic-big_query_storage-v1beta1.ts | 410 --------- .../test/gapic_big_query_storage_v1beta1.ts | 865 ++++++++++++++++++ .../bigquery-storage/webpack.config.js | 12 +- 12 files changed, 1065 insertions(+), 584 deletions(-) create mode 100644 handwritten/bigquery-storage/.eslintrc.json delete mode 100644 handwritten/bigquery-storage/.eslintrc.yml delete mode 100644 handwritten/bigquery-storage/.prettierrc create mode 100644 handwritten/bigquery-storage/.prettierrc.js delete mode 100644 handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts diff --git a/handwritten/bigquery-storage/.eslintrc.json b/handwritten/bigquery-storage/.eslintrc.json new file mode 100644 index 00000000000..78215349546 --- /dev/null +++ b/handwritten/bigquery-storage/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/handwritten/bigquery-storage/.eslintrc.yml b/handwritten/bigquery-storage/.eslintrc.yml deleted file mode 100644 index 73eeec27612..00000000000 --- a/handwritten/bigquery-storage/.eslintrc.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -extends: - - 'eslint:recommended' - - 'plugin:node/recommended' - - prettier -plugins: - - node - - prettier -rules: - prettier/prettier: error - block-scoped-var: error - eqeqeq: error - no-warning-comments: warn - no-var: error - prefer-const: error diff --git a/handwritten/bigquery-storage/.prettierrc b/handwritten/bigquery-storage/.prettierrc deleted file mode 
100644 index df6eac07446..00000000000 --- a/handwritten/bigquery-storage/.prettierrc +++ /dev/null @@ -1,8 +0,0 @@ ---- -bracketSpacing: false -printWidth: 80 -semi: true -singleQuote: true -tabWidth: 2 -trailingComma: es5 -useTabs: false diff --git a/handwritten/bigquery-storage/.prettierrc.js b/handwritten/bigquery-storage/.prettierrc.js new file mode 100644 index 00000000000..08cba3775be --- /dev/null +++ b/handwritten/bigquery-storage/.prettierrc.js @@ -0,0 +1,17 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 396ffa7ec1c..930799f438a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -26,31 +26,33 @@ "samples-test": "cd samples/ && npm link ../ && npm test && cd ../" }, "dependencies": { - "google-gax": "^1.14.1" + "google-gax": "^2.0.1" }, "devDependencies": { "@types/mocha": "^7.0.1", "@types/node": "^13.7.1", + "@types/sinon": "^7.5.2", "c8": "^7.1.0", - "gts": "^1.1.2", + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.0", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", + "gts": "2.0.0-alpha.9", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", "linkinator": "^2.0.1", "mocha": "^7.0.1", - "pack-n-play": "^1.0.0-2", "null-loader": "^3.0.0", + "pack-n-play": "^1.0.0-2", + "prettier": "^1.19.1", + "sinon": "^9.0.1", "ts-loader": "^6.2.1", - "typescript": "~3.7.5", + "typescript": "^3.8.3", "webpack": "^4.41.6", - "webpack-cli": "^3.3.11", - "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.0", - "eslint-plugin-node": "^11.0.0", - "eslint-plugin-prettier": "^3.1.2", - "prettier": "^1.19.1" + "webpack-cli": "^3.3.11" }, "engines": { - "node": ">=8.13.0" + "node": ">=10" } } diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 8c0e36d5f0a..a3e89d46812 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -19,8 +19,6 @@ import * as v1beta1 from './v1beta1'; const BigQueryStorageClient = v1beta1.BigQueryStorageClient; export {v1beta1, BigQueryStorageClient}; -// For compatibility with JavaScript libraries we need to provide this default export: -// tslint:disable-next-line no-default-export export default {v1beta1, BigQueryStorageClient}; import * as protos from '../protos/protos'; export 
{protos}; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index febe76cd7fa..98cac03fa98 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -18,7 +18,7 @@ import * as gax from 'google-gax'; import { - APICallback, + GaxCall, Callback, CallOptions, Descriptors, @@ -26,7 +26,7 @@ import { } from 'google-gax'; import * as path from 'path'; -import * as protosTypes from '../../protos/protos'; +import * as protos from '../../protos/protos'; import * as gapicConfig from './big_query_storage_client_config.json'; const version = require('../../../package.json').version; @@ -39,14 +39,6 @@ const version = require('../../../package.json').version; * @memberof v1beta1 */ export class BigQueryStorageClient { - private _descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - private _innerApiCalls: {[name: string]: Function}; - private _pathTemplates: {[name: string]: gax.PathTemplate}; private _terminated = false; private _opts: ClientOptions; private _gaxModule: typeof gax | typeof gax.fallback; @@ -54,6 +46,14 @@ export class BigQueryStorageClient { private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; bigQueryStorageStub?: Promise<{[name: string]: Function}>; /** @@ -145,13 +145,16 @@ export class BigQueryStorageClient { 'protos.json' ); this._protos = this._gaxGrpc.loadProto( - opts.fallback ? require('../../protos/protos.json') : nodejsProtoPath + opts.fallback + ? 
// eslint-disable-next-line @typescript-eslint/no-var-requires + require('../../protos/protos.json') + : nodejsProtoPath ); // This API contains "path templates"; forward-slash-separated // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. - this._pathTemplates = { + this.pathTemplates = { readSessionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}' ), @@ -162,7 +165,7 @@ export class BigQueryStorageClient { // Some of the methods on this service provide streaming responses. // Provide descriptors for these. - this._descriptors.stream = { + this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( gax.StreamType.SERVER_STREAMING ), @@ -179,7 +182,7 @@ export class BigQueryStorageClient { // Set up a dictionary of "inner API calls"; the core implementation // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. - this._innerApiCalls = {}; + this.innerApiCalls = {}; } /** @@ -206,7 +209,7 @@ export class BigQueryStorageClient { ? 
(this._protos as protobuf.Root).lookupService( 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' ) - : // tslint:disable-next-line no-any + : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1beta1 .BigQueryStorage, this._opts @@ -221,9 +224,8 @@ export class BigQueryStorageClient { 'finalizeStream', 'splitReadStream', ]; - for (const methodName of bigQueryStorageStubMethods) { - const innerCallPromise = this.bigQueryStorageStub.then( + const callPromise = this.bigQueryStorageStub.then( stub => (...args: Array<{}>) => { if (this._terminated) { return Promise.reject('The client has already been closed.'); @@ -237,20 +239,14 @@ export class BigQueryStorageClient { ); const apiCall = this._gaxModule.createApiCall( - innerCallPromise, + callPromise, this._defaults[methodName], - this._descriptors.page[methodName] || - this._descriptors.stream[methodName] || - this._descriptors.longrunning[methodName] + this.descriptors.page[methodName] || + this.descriptors.stream[methodName] || + this.descriptors.longrunning[methodName] ); - this._innerApiCalls[methodName] = ( - argument: {}, - callOptions?: CallOptions, - callback?: APICallback - ) => { - return apiCall(argument, callOptions, callback); - }; + this.innerApiCalls[methodName] = apiCall; } return this.bigQueryStorageStub; @@ -311,26 +307,37 @@ export class BigQueryStorageClient { // -- Service calls -- // ------------------- createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest | undefined ), {} | 
undefined ] >; createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -379,26 +386,28 @@ export class BigQueryStorageClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.IReadSession, + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest | undefined ), {} | undefined @@ -422,29 +431,40 @@ export class BigQueryStorageClient { 'table_reference.dataset_id': request.tableReference!.datasetId || '', }); this.initialize(); - return this._innerApiCalls.createReadSession(request, options, callback); + return this.innerApiCalls.createReadSession(request, options, callback); } batchCreateReadSessionStreams( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + 
protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest | undefined ), {} | undefined ] >; batchCreateReadSessionStreams( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + batchCreateReadSessionStreams( + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -468,26 +488,28 @@ export class BigQueryStorageClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ batchCreateReadSessionStreams( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest | undefined ), {} | undefined @@ -510,33 +532,44 @@ export class BigQueryStorageClient { 'session.name': request.session!.name || '', }); this.initialize(); - return this._innerApiCalls.batchCreateReadSessionStreams( + return this.innerApiCalls.batchCreateReadSessionStreams( request, options, callback ); } finalizeStream( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + request: 
protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.protobuf.IEmpty, + protos.google.protobuf.IEmpty, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest | undefined ), {} | undefined ] >; finalizeStream( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.protobuf.IEmpty, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + finalizeStream( + request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -566,26 +599,28 @@ export class BigQueryStorageClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ finalizeStream( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.protobuf.IEmpty, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.protobuf.IEmpty, - | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.protobuf.IEmpty, + protos.google.protobuf.IEmpty, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest | undefined ), {} | undefined @@ -608,29 +643,40 @@ export class BigQueryStorageClient { 'stream.name': request.stream!.name || '', }); this.initialize(); - return this._innerApiCalls.finalizeStream(request, options, callback); + return this.innerApiCalls.finalizeStream(request, options, callback); } splitReadStream( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest | undefined ), {} | undefined ] >; splitReadStream( - request: 
protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -667,26 +713,28 @@ export class BigQueryStorageClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ splitReadStream( - request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest | undefined ), {} | undefined @@ -709,7 +757,7 @@ export class BigQueryStorageClient { 'original_stream.name': request.originalStream!.name || '', }); this.initialize(); - return this._innerApiCalls.splitReadStream(request, options, callback); + return this.innerApiCalls.splitReadStream(request, options, callback); } /** @@ -735,7 +783,7 @@ export class BigQueryStorageClient { * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. 
*/ readRows( - request?: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, options?: gax.CallOptions ): gax.CancellableStream { request = request || {}; @@ -748,7 +796,7 @@ export class BigQueryStorageClient { 'read_position.stream.name': request.readPosition!.stream!.name || '', }); this.initialize(); - return this._innerApiCalls.readRows(request, options); + return this.innerApiCalls.readRows(request, options); } // -------------------- @@ -764,10 +812,10 @@ export class BigQueryStorageClient { * @returns {string} Resource name string. */ readSessionPath(project: string, location: string, session: string) { - return this._pathTemplates.readSessionPathTemplate.render({ - project, - location, - session, + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, }); } @@ -779,7 +827,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the project. */ matchProjectFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .project; } @@ -791,7 +839,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the location. */ matchLocationFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .location; } @@ -803,7 +851,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the session. 
*/ matchSessionFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .session; } @@ -816,10 +864,10 @@ export class BigQueryStorageClient { * @returns {string} Resource name string. */ streamPath(project: string, location: string, stream: string) { - return this._pathTemplates.streamPathTemplate.render({ - project, - location, - stream, + return this.pathTemplates.streamPathTemplate.render({ + project: project, + location: location, + stream: stream, }); } @@ -831,7 +879,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the project. */ matchProjectFromStreamName(streamName: string) { - return this._pathTemplates.streamPathTemplate.match(streamName).project; + return this.pathTemplates.streamPathTemplate.match(streamName).project; } /** @@ -842,7 +890,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the location. */ matchLocationFromStreamName(streamName: string) { - return this._pathTemplates.streamPathTemplate.match(streamName).location; + return this.pathTemplates.streamPathTemplate.match(streamName).location; } /** @@ -853,7 +901,7 @@ export class BigQueryStorageClient { * @returns {string} A string representing the stream. 
*/ matchStreamFromStreamName(streamName: string) { - return this._pathTemplates.streamPathTemplate.match(streamName).stream; + return this.pathTemplates.streamPathTemplate.match(streamName).stream; } /** diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index d99e302fbd4..9a6620a3300 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,23 +1,5 @@ { - "updateTime": "2020-03-22T12:01:46.515920Z", - "sources": [ - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0be7105dc52590fa9a24e784052298ae37ce53aa", - "internalRef": "302154871", - "log": "0be7105dc52590fa9a24e784052298ae37ce53aa\nAdd BUILD.bazel file to asset/v1p1beta1\n\nPiperOrigin-RevId: 302154871\n\n6c248fd13e8543f8d22cbf118d978301a9fbe2a8\nAdd missing resource annotations and additional_bindings to dialogflow v2 API.\n\nPiperOrigin-RevId: 302063117\n\n9a3a7f33be9eeacf7b3e98435816b7022d206bd7\nChange the service name from \"chromeos-moblab.googleapis.com\" to \"chromeosmoblab.googleapis.com\"\n\nPiperOrigin-RevId: 302060989\n\n98a339237577e3de26cb4921f75fb5c57cc7a19f\nfeat: devtools/build/v1 publish client library config annotations\n\n* add details field to some of the BuildEvents\n* add final_invocation_id and build_tool_exit_code fields to BuildStatus\n\nPiperOrigin-RevId: 302044087\n\ncfabc98c6bbbb22d1aeaf7612179c0be193b3a13\nfeat: home/graph/v1 publish client library config annotations & comment updates\n\nThis change includes adding the client library configuration annotations, updated proto comments, and some client library configuration files.\n\nPiperOrigin-RevId: 302042647\n\nc8c8c0bd15d082db9546253dbaad1087c7a9782c\nchore: use latest gapic-generator in bazel WORKSPACE.\nincluding the following commits from gapic-generator:\n- feat: take source protos in all sub-packages (#3144)\n\nPiperOrigin-RevId: 
301843591\n\ne4daf5202ea31cb2cb6916fdbfa9d6bd771aeb4c\nAdd bazel file for v1 client lib generation\n\nPiperOrigin-RevId: 301802926\n\n275fbcce2c900278d487c33293a3c7e1fbcd3a34\nfeat: pubsub/v1 add an experimental filter field to Subscription\n\nPiperOrigin-RevId: 301661567\n\nf2b18cec51d27c999ad30011dba17f3965677e9c\nFix: UpdateBackupRequest.backup is a resource, not a resource reference - remove annotation.\n\nPiperOrigin-RevId: 301636171\n\n800384063ac93a0cac3a510d41726fa4b2cd4a83\nCloud Billing Budget API v1beta1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301634389\n\n0cc6c146b660db21f04056c3d58a4b752ee445e3\nCloud Billing Budget API v1alpha1\nModified api documentation to include warnings about the new filter field.\n\nPiperOrigin-RevId: 301630018\n\nff2ea00f69065585c3ac0993c8b582af3b6fc215\nFix: Add resource definition for a parent of InspectTemplate which was otherwise missing.\n\nPiperOrigin-RevId: 301623052\n\n55fa441c9daf03173910760191646399338f2b7c\nAdd proto definition for AccessLevel, AccessPolicy, and ServicePerimeter.\n\nPiperOrigin-RevId: 301620844\n\ne7b10591c5408a67cf14ffafa267556f3290e262\nCloud Bigtable Managed Backup service and message proto files.\n\nPiperOrigin-RevId: 301585144\n\nd8e226f702f8ddf92915128c9f4693b63fb8685d\nfeat: Add time-to-live in a queue for builds\n\nPiperOrigin-RevId: 301579876\n\n430375af011f8c7a5174884f0d0e539c6ffa7675\ndocs: add missing closing backtick\n\nPiperOrigin-RevId: 301538851\n\n0e9f1f60ded9ad1c2e725e37719112f5b487ab65\nbazel: Use latest release of gax_java\n\nPiperOrigin-RevId: 301480457\n\n5058c1c96d0ece7f5301a154cf5a07b2ad03a571\nUpdate GAPIC v2 with batching parameters for Logging API\n\nPiperOrigin-RevId: 301443847\n\n64ab9744073de81fec1b3a6a931befc8a90edf90\nFix: Introduce location-based organization/folder/billing-account resources\nChore: Update copyright years\n\nPiperOrigin-RevId: 301373760\n\n23d5f09e670ebb0c1b36214acf78704e2ecfc2ac\nUpdate 
field_behavior annotations in V1 and V2.\n\nPiperOrigin-RevId: 301337970\n\nb2cf37e7fd62383a811aa4d54d013ecae638851d\nData Catalog V1 API\n\nPiperOrigin-RevId: 301282503\n\n1976b9981e2900c8172b7d34b4220bdb18c5db42\nCloud DLP api update. Adds missing fields to Finding and adds support for hybrid jobs.\n\nPiperOrigin-RevId: 301205325\n\nae78682c05e864d71223ce22532219813b0245ac\nfix: several sample code blocks in comments are now properly indented for markdown\n\nPiperOrigin-RevId: 301185150\n\ndcd171d04bda5b67db13049320f97eca3ace3731\nPublish Media Translation API V1Beta1\n\nPiperOrigin-RevId: 301180096\n\nff1713453b0fbc5a7544a1ef6828c26ad21a370e\nAdd protos and BUILD rules for v1 API.\n\nPiperOrigin-RevId: 301179394\n\n8386761d09819b665b6a6e1e6d6ff884bc8ff781\nfeat: chromeos/modlab publish protos and config for Chrome OS Moblab API.\n\nPiperOrigin-RevId: 300843960\n\nb2e2bc62fab90e6829e62d3d189906d9b79899e4\nUpdates to GCS gRPC API spec:\n\n1. Changed GetIamPolicy and TestBucketIamPermissions to use wrapper messages around google.iam.v1 IAM requests messages, and added CommonRequestParams. This lets us support RequesterPays buckets.\n2. Added a metadata field to GetObjectMediaResponse, to support resuming an object media read safely (by extracting the generation of the object being read, and using it in the resumed read request).\n\nPiperOrigin-RevId: 300817706\n\n7fd916ce12335cc9e784bb9452a8602d00b2516c\nAdd deprecated_collections field for backward-compatiblity in PHP and monolith-generated Python and Ruby clients.\n\nGenerate TopicName class in Java which covers the functionality of both ProjectTopicName and DeletedTopicName. 
Introduce breaking changes to be fixed by synth.py.\n\nDelete default retry parameters.\n\nRetry codes defs can be deleted once # https://github.com/googleapis/gapic-generator/issues/3137 is fixed.\n\nPiperOrigin-RevId: 300813135\n\n047d3a8ac7f75383855df0166144f891d7af08d9\nfix!: google/rpc refactor ErrorInfo.type to ErrorInfo.reason and comment updates.\n\nPiperOrigin-RevId: 300773211\n\nfae4bb6d5aac52aabe5f0bb4396466c2304ea6f6\nAdding RetryPolicy to pubsub.proto\n\nPiperOrigin-RevId: 300769420\n\n7d569be2928dbd72b4e261bf9e468f23afd2b950\nAdding additional protocol buffer annotations to v3.\n\nPiperOrigin-RevId: 300718800\n\n13942d1a85a337515040a03c5108993087dc0e4f\nAdd logging protos for Recommender v1.\n\nPiperOrigin-RevId: 300689896\n\na1a573c3eecfe2c404892bfa61a32dd0c9fb22b6\nfix: change go package to use cloud.google.com/go/maps\n\nPiperOrigin-RevId: 300661825\n\nc6fbac11afa0c7ab2972d9df181493875c566f77\nfeat: publish documentai/v1beta2 protos\n\nPiperOrigin-RevId: 300656808\n\n5202a9e0d9903f49e900f20fe5c7f4e42dd6588f\nProtos for v1beta1 release of Cloud Security Center Settings API\n\nPiperOrigin-RevId: 300580858\n\n83518e18655d9d4ac044acbda063cc6ecdb63ef8\nAdds gapic.yaml file and BUILD.bazel file.\n\nPiperOrigin-RevId: 300554200\n\n836c196dc8ef8354bbfb5f30696bd3477e8db5e2\nRegenerate recommender v1beta1 gRPC ServiceConfig file for Insights methods.\n\nPiperOrigin-RevId: 300549302\n\n34a5450c591b6be3d6566f25ac31caa5211b2f3f\nIncreases the default timeout from 20s to 30s for MetricService\n\nPiperOrigin-RevId: 300474272\n\n5d8bffe87cd01ba390c32f1714230e5a95d5991d\nfeat: use the latest gapic-generator in WORKSPACE for bazel build.\n\nPiperOrigin-RevId: 300461878\n\nd631c651e3bcfac5d371e8560c27648f7b3e2364\nUpdated the GAPIC configs to include parameters for Backups APIs.\n\nPiperOrigin-RevId: 300443402\n\n678afc7055c1adea9b7b54519f3bdb228013f918\nAdding Game Servers v1beta API.\n\nPiperOrigin-RevId: 
300433218\n\n80d2bd2c652a5e213302041b0620aff423132589\nEnable proto annotation and gapic v2 for talent API.\n\nPiperOrigin-RevId: 300393997\n\n85e454be7a353f7fe1bf2b0affb753305785b872\ndocs(google/maps/roads): remove mention of nonexported api\n\nPiperOrigin-RevId: 300367734\n\nbf839ae632e0f263a729569e44be4b38b1c85f9c\nAdding protocol buffer annotations and updated config info for v1 and v2.\n\nPiperOrigin-RevId: 300276913\n\n309b899ca18a4c604bce63882a161d44854da549\nPublish `Backup` APIs and protos.\n\nPiperOrigin-RevId: 300246038\n\neced64c3f122421350b4aca68a28e89121d20db8\nadd PHP client libraries\n\nPiperOrigin-RevId: 300193634\n\n7727af0e39df1ae9ad715895c8576d7b65cf6c6d\nfeat: use the latest gapic-generator and protoc-java-resource-name-plugin in googleapis/WORKSPACE.\n\nPiperOrigin-RevId: 300188410\n\n2a25aa351dd5b5fe14895266aff5824d90ce757b\nBreaking change: remove the ProjectOrTenant resource and its references.\n\nPiperOrigin-RevId: 300182152\n\na499dbb28546379415f51803505cfb6123477e71\nUpdate web risk v1 gapic config and BUILD file.\n\nPiperOrigin-RevId: 300152177\n\n52701da10fec2a5f9796e8d12518c0fe574488fe\nFix: apply appropriate namespace/package options for C#, PHP and Ruby.\n\nPiperOrigin-RevId: 300123508\n\n365c029b8cdb63f7751b92ab490f1976e616105c\nAdd CC targets to the kms protos.\n\nThese are needed by go/tink.\n\nPiperOrigin-RevId: 300038469\n\n4ba9aa8a4a1413b88dca5a8fa931824ee9c284e6\nExpose logo recognition API proto for GA.\n\nPiperOrigin-RevId: 299971671\n\n1c9fc2c9e03dadf15f16b1c4f570955bdcebe00e\nAdding ruby_package option to accessapproval.proto for the Ruby client libraries generation.\n\nPiperOrigin-RevId: 299955924\n\n1cc6f0a7bfb147e6f2ede911d9b01e7a9923b719\nbuild(google/maps/routes): generate api clients\n\nPiperOrigin-RevId: 299955905\n\n29a47c965aac79e3fe8e3314482ca0b5967680f0\nIncrease timeout to 1hr for method `dropRange` in bigtable/admin/v2, which is\nsynced with the timeout setting in gapic_yaml.\n\nPiperOrigin-RevId: 
299917154\n\n8f631c4c70a60a9c7da3749511ee4ad432b62898\nbuild(google/maps/roads/v1op): move go to monorepo pattern\n\nPiperOrigin-RevId: 299885195\n\nd66816518844ebbf63504c9e8dfc7133921dd2cd\nbuild(google/maps/roads/v1op): Add bazel build files to generate clients.\n\nPiperOrigin-RevId: 299851148\n\naf7dff701fabe029672168649c62356cf1bb43d0\nAdd LogPlayerReports and LogImpressions to Playable Locations service\n\nPiperOrigin-RevId: 299724050\n\nb6927fca808f38df32a642c560082f5bf6538ced\nUpdate BigQuery Connection API v1beta1 proto: added credential to CloudSqlProperties.\n\nPiperOrigin-RevId: 299503150\n\n91e1fb5ef9829c0c7a64bfa5bde330e6ed594378\nchore: update protobuf (protoc) version to 3.11.2\n\nPiperOrigin-RevId: 299404145\n\n30e36b4bee6749c4799f4fc1a51cc8f058ba167d\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 299399890\n\nffbb493674099f265693872ae250711b2238090c\nfeat: cloudbuild/v1 add new fields and annotate OUTPUT_OUT fields.\n\nPiperOrigin-RevId: 299397780\n\nbc973a15818e00c19e121959832676e9b7607456\nbazel: Fix broken common dependency\n\nPiperOrigin-RevId: 299397431\n\n71094a343e3b962e744aa49eb9338219537474e4\nchore: bigtable/admin/v2 publish retry config\n\nPiperOrigin-RevId: 299391875\n\n8f488efd7bda33885cb674ddd023b3678c40bd82\nfeat: Migrate logging to GAPIC v2; release new features.\n\nIMPORTANT: This is a breaking change for client libraries\nin all languages.\n\nCommitter: @lukesneeringer, @jskeet\nPiperOrigin-RevId: 299370279\n\n007605bf9ad3a1fd775014ebefbf7f1e6b31ee71\nUpdate API for bigqueryreservation v1beta1.\n- Adds flex capacity commitment plan to CapacityCommitment.\n- Adds methods for getting and updating BiReservations.\n- Adds methods for updating/splitting/merging CapacityCommitments.\n\nPiperOrigin-RevId: 299368059\n\n" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "7e98e1609c91082f4eeb63b530c6468aefd18cfd" - } - } - ], + "updateTime": 
"2020-03-31T19:15:42.799551Z", "destinations": [ { "client": { diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 72dceac69f8..1f6ab3784e4 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -16,7 +16,6 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** - /* eslint-disable node/no-missing-require, no-unused-vars */ const storage = require('@google-cloud/bigquery-storage'); diff --git a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts b/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts deleted file mode 100644 index 8bc4a193677..00000000000 --- a/handwritten/bigquery-storage/test/gapic-big_query_storage-v1beta1.ts +++ /dev/null @@ -1,410 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as protosTypes from '../protos/protos'; -import * as assert from 'assert'; -import {describe, it} from 'mocha'; -const bigquerystorageModule = require('../src'); - -import {PassThrough} from 'stream'; - -const FAKE_STATUS_CODE = 1; -class FakeError { - name: string; - message: string; - code: number; - constructor(n: number) { - this.name = 'fakeName'; - this.message = 'fake message'; - this.code = n; - } -} -const error = new FakeError(FAKE_STATUS_CODE); -export interface Callback { - (err: FakeError | null, response?: {} | null): void; -} - -export class Operation { - constructor() {} - promise() {} -} -function mockSimpleGrpcMethod( - expectedRequest: {}, - response: {} | null, - error: FakeError | null -) { - return (actualRequest: {}, options: {}, callback: Callback) => { - assert.deepStrictEqual(actualRequest, expectedRequest); - if (error) { - callback(error); - } else if (response) { - callback(null, response); - } else { - callback(null); - } - }; -} -function mockServerStreamingGrpcMethod( - expectedRequest: {}, - response: {} | null, - error: FakeError | null -) { - return (actualRequest: {}) => { - assert.deepStrictEqual(actualRequest, expectedRequest); - const mockStream = new PassThrough({ - objectMode: true, - transform: (chunk: {}, enc: {}, callback: Callback) => { - if (error) { - callback(error); - } else { - callback(null, response); - } - }, - }); - return mockStream; - }; -} -describe('v1beta1.BigQueryStorageClient', () => { - it('has servicePath', () => { - const servicePath = - bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; - assert(servicePath); - }); - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; - assert(apiEndpoint); - }); - it('has port', () => { - const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; - assert(port); - assert(typeof port === 'number'); - }); - it('should create a client with no option', () => { - 
const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); - assert(client); - }); - it('should create a client with gRPC fallback', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - fallback: true, - }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryStorageStub, undefined); - await client.initialize(); - assert(client.bigQueryStorageStub); - }); - it('has close method', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.close(); - }); - describe('createReadSession', () => { - it('invokes createReadSession without error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.createReadSession(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes createReadSession with error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest = {}; - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( - request, - null, - error - ); - client.createReadSession(request, (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); - describe('batchCreateReadSessionStreams', () => { - it('invokes batchCreateReadSessionStreams without error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; - request.session = {}; - request.session.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.batchCreateReadSessionStreams = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.batchCreateReadSessionStreams(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes batchCreateReadSessionStreams with error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: 
protosTypes.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest = {}; - request.session = {}; - request.session.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.batchCreateReadSessionStreams = mockSimpleGrpcMethod( - request, - null, - error - ); - client.batchCreateReadSessionStreams( - request, - (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - } - ); - }); - }); - describe('finalizeStream', () => { - it('invokes finalizeStream without error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; - request.stream = {}; - request.stream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.finalizeStream = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.finalizeStream(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes finalizeStream with error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest = {}; - request.stream = {}; - request.stream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.finalizeStream = mockSimpleGrpcMethod( - request, - null, - 
error - ); - client.finalizeStream(request, (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); - describe('splitReadStream', () => { - it('invokes splitReadStream without error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; - request.originalStream = {}; - request.originalStream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.splitReadStream(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes splitReadStream with error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest = {}; - request.originalStream = {}; - request.originalStream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( - request, - null, - error - ); - client.splitReadStream(request, (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); - describe('readRows', () => { - it('invokes readRows without 
error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; - request.readPosition = {}; - request.readPosition.stream = {}; - request.readPosition.stream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( - request, - expectedResponse, - null - ); - const stream = client.readRows(request); - stream.on('data', (response: {}) => { - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - stream.on('error', (err: FakeError) => { - done(err); - }); - stream.write(); - }); - it('invokes readRows with error', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest = {}; - request.readPosition = {}; - request.readPosition.stream = {}; - request.readPosition.stream.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( - request, - null, - error - ); - const stream = client.readRows(request); - stream.on('data', () => { - assert.fail(); - }); - stream.on('error', (err: FakeError) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - done(); - }); - stream.write(); - }); - }); -}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts new file mode 100644 index 
00000000000..47eca9a12c9 --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -0,0 +1,865 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigquerystorageModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message).toObject( + instance as protobuf.Message, + {defaults: true} + ); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubServerStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // write something to the stream to trigger transformStub and send the response back to the client + setImmediate(() => { + mockStream.write({}); + }); + setImmediate(() => { + mockStream.end(); + }); + return sinon.stub().returns(mockStream); +} + +describe('v1beta1.BigQueryStorageClient', () => { + it('has servicePath', () => { + const servicePath = + bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + await client.initialize(); + assert(client.bigQueryStorageStub); + }); + + it('has close method', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.close(); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createReadSession', () => { + it('invokes createReadSession without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + const expectedHeaderRequestParams = + 'table_reference.project_id=&table_reference.dataset_id='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': 
expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); + const [response] = await client.createReadSession(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes createReadSession without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + const expectedHeaderRequestParams = + 'table_reference.project_id=&table_reference.dataset_id='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.createReadSession( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.IReadSession | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + 
}); + + it('invokes createReadSession with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + const expectedHeaderRequestParams = + 'table_reference.project_id=&table_reference.dataset_id='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createReadSession = stubSimpleCall( + undefined, + expectedError + ); + assert.rejects(async () => { + await client.createReadSession(request); + }, expectedError); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('batchCreateReadSessionStreams', () => { + it('invokes batchCreateReadSessionStreams without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session = {}; + request.session.name = ''; + const expectedHeaderRequestParams = 'session.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + ); + 
client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( + expectedResponse + ); + const [response] = await client.batchCreateReadSessionStreams(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes batchCreateReadSessionStreams without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session = {}; + request.session.name = ''; + const expectedHeaderRequestParams = 'session.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + ); + client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.batchCreateReadSessionStreams( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes batchCreateReadSessionStreams with error', async () => { + const client = new 
bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session = {}; + request.session.name = ''; + const expectedHeaderRequestParams = 'session.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( + undefined, + expectedError + ); + assert.rejects(async () => { + await client.batchCreateReadSessionStreams(request); + }, expectedError); + assert( + (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('finalizeStream', () => { + it('invokes finalizeStream without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream = {}; + request.stream.name = ''; + const expectedHeaderRequestParams = 'stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.finalizeStream = stubSimpleCall(expectedResponse); + const [response] = await client.finalizeStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.finalizeStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, 
undefined) + ); + }); + + it('invokes finalizeStream without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream = {}; + request.stream.name = ''; + const expectedHeaderRequestParams = 'stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.finalizeStream = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.finalizeStream( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.finalizeStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes finalizeStream with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream = {}; + request.stream.name = ''; + const expectedHeaderRequestParams = 'stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + 
client.innerApiCalls.finalizeStream = stubSimpleCall( + undefined, + expectedError + ); + assert.rejects(async () => { + await client.finalizeStream(request); + }, expectedError); + assert( + (client.innerApiCalls.finalizeStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('splitReadStream', () => { + it('invokes splitReadStream without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream = {}; + request.originalStream.name = ''; + const expectedHeaderRequestParams = 'original_stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); + const [response] = await client.splitReadStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes splitReadStream without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream = {}; + request.originalStream.name = ''; + const expectedHeaderRequestParams = 'original_stream.name='; + const expectedOptions = { + 
otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.splitReadStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes splitReadStream with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream = {}; + request.originalStream.name = ''; + const expectedHeaderRequestParams = 'original_stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.splitReadStream = stubSimpleCall( + undefined, + expectedError + ); + assert.rejects(async () => { + await client.splitReadStream(request); + }, expectedError); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('readRows', () => { + it('invokes readRows without error', async () => { + const client = new 
bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition = {}; + request.readPosition.stream = {}; + request.readPosition.stream.name = ''; + const expectedHeaderRequestParams = 'read_position.stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() + ); + client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.readRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions) + ); + }); + + it('invokes readRows with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition = {}; + request.readPosition.stream = {}; + request.readPosition.stream.name = ''; + const expectedHeaderRequestParams = 'read_position.stream.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); 
+ client.innerApiCalls.readRows = stubServerStreamingCall( + undefined, + expectedError + ); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + assert.rejects(async () => { + await promise; + }, expectedError); + assert( + (client.innerApiCalls.readRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions) + ); + }); + }); + + describe('Path templates', () => { + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + 
(client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('stream', () => { + const fakePath = '/rendered/path/stream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + stream: 'streamValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.streamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.streamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('streamPath', () => { + const result = client.streamPath( + 'projectValue', + 'locationValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.streamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromStreamName', () => { + const result = client.matchProjectFromStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromStreamName', () => { + const result = client.matchLocationFromStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromStreamName', () => { + const result = client.matchStreamFromStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + 
(client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js index b4d07b13300..683c6ec0233 100644 --- a/handwritten/bigquery-storage/webpack.config.js +++ b/handwritten/bigquery-storage/webpack.config.js @@ -36,27 +36,27 @@ module.exports = { { test: /\.tsx?$/, use: 'ts-loader', - exclude: /node_modules/ + exclude: /node_modules/, }, { test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]grpc/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]retry-request/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]gtoken/, - use: 'null-loader' + use: 'null-loader', }, ], }, From b338e69efd52c6adfe20d6664cd2f43ee1c7058d Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Tue, 31 Mar 2020 18:41:38 -0700 Subject: [PATCH 021/333] build: set AUTOSYNTH_MULTIPLE_COMMITS=true for context aware commits (#41) --- handwritten/bigquery-storage/synth.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index ec0cb3d4f47..564adc79dcb 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -17,6 +17,9 @@ import subprocess import logging logging.basicConfig(level=logging.DEBUG) + +AUTOSYNTH_MULTIPLE_COMMITS = True + # Run the gapic generator gapic = gcp.GAPICMicrogenerator() name = 'bigquerystorage' From 036b7e1511cca44217304e8f57487749348bd8ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 1 Apr 2020 23:39:05 +0200 Subject: [PATCH 022/333] chore(deps): update dependency @types/sinon to v9 (#42) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [@types/sinon](https://togithub.com/DefinitelyTyped/DefinitelyTyped) | devDependencies | major | [`^7.5.2` -> `^9.0.0`](https://renovatebot.com/diffs/npm/@types%2fsinon/7.5.2/9.0.0) | --- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). 
--- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 930799f438a..d3919de0baf 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -31,7 +31,7 @@ "devDependencies": { "@types/mocha": "^7.0.1", "@types/node": "^13.7.1", - "@types/sinon": "^7.5.2", + "@types/sinon": "^9.0.0", "c8": "^7.1.0", "eslint": "^6.8.0", "eslint-config-prettier": "^6.10.0", From 781640318e713e230e356b40e7bc01dcecbd123c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 2 Apr 2020 13:00:43 -0700 Subject: [PATCH 023/333] chore: linting (#44) --- handwritten/bigquery-storage/README.md | 4 ++-- .../src/v1beta1/big_query_storage_client.ts | 8 +------- handwritten/bigquery-storage/synth.metadata | 19 ++++++++++++++++++- .../system-test/fixtures/sample/src/index.ts | 2 +- 4 files changed, 22 insertions(+), 11 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 40069706e54..cbd319b1dc1 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -151,7 +151,7 @@ async function bigqueryStorageQuickstart() { client .readRows(readRowsRequest) .on('error', console.error) - .on('data', function(data) { + .on('data', data => { try { const decodedData = avroType.decode( data.avroRows.serializedBinaryRows @@ -168,7 +168,7 @@ async function bigqueryStorageQuickstart() { console.log(error); } }) - .on('end', function() { + .on('end', () => { console.log( `Got ${names.size} unique names in states: ${Object.keys(states)}` ); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 98cac03fa98..89d815f9e06 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ 
b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -17,13 +17,7 @@ // ** All changes to this file may be overwritten. ** import * as gax from 'google-gax'; -import { - GaxCall, - Callback, - CallOptions, - Descriptors, - ClientOptions, -} from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; import * as protos from '../../protos/protos'; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 9a6620a3300..2c96de14d13 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,5 +1,22 @@ { - "updateTime": "2020-03-31T19:15:42.799551Z", + "updateTime": "2020-04-02T12:08:46.500507Z", + "sources": [ + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "75047719f704d61f405cac6d7439637ab36c1232", + "internalRef": "304305096" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "99820243d348191bc9c634f2b48ddf65096285ed" + } + } + ], "destinations": [ { "client": { diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index c0e08eebba4..d53a99905ac 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -19,7 +19,7 @@ import {BigQueryStorageClient} from '@google-cloud/bigquery-storage'; function main() { - const bigQueryStorageClient = new BigQueryStorageClient(); + new BigQueryStorageClient(); } main(); From a94c1738e7b91f4b6653d7a33f62c60d1f8a0cdf Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Sun, 5 Apr 2020 12:51:03 -0700 Subject: [PATCH 024/333] chore: remove duplicate mocha config (#46) --- handwritten/bigquery-storage/.mocharc.json | 5 ----- 1 
file changed, 5 deletions(-) delete mode 100644 handwritten/bigquery-storage/.mocharc.json diff --git a/handwritten/bigquery-storage/.mocharc.json b/handwritten/bigquery-storage/.mocharc.json deleted file mode 100644 index 670c5e2c24b..00000000000 --- a/handwritten/bigquery-storage/.mocharc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} From a973e8bd79770a3a4b05250f36cc0d285c9b54c2 Mon Sep 17 00:00:00 2001 From: Steffany Brown <30247553+steffnay@users.noreply.github.com> Date: Mon, 6 Apr 2020 22:47:41 -0700 Subject: [PATCH 025/333] feat: add V1 client (#28) * feat: generate v1 * chore: update exports and excludes * style: linted * chore: updates exports * chore: linted Co-authored-by: Benjamin E. Coe --- .../cloud/bigquery/storage/v1/arrow.proto | 45 + .../cloud/bigquery/storage/v1/avro.proto | 41 + .../cloud/bigquery/storage/v1/storage.proto | 236 + .../cloud/bigquery/storage/v1/stream.proto | 137 + .../bigquery-storage/protos/protos.d.ts | 1676 +++++++ handwritten/bigquery-storage/protos/protos.js | 3889 +++++++++++++++++ .../bigquery-storage/protos/protos.json | 342 ++ handwritten/bigquery-storage/src/index.ts | 8 +- .../src/v1/big_query_read_client.ts | 698 +++ .../src/v1/big_query_read_client_config.json | 44 + .../src/v1/big_query_read_proto_list.json | 6 + handwritten/bigquery-storage/src/v1/index.ts | 19 + .../src/v1beta1/big_query_storage_client.ts | 12 +- handwritten/bigquery-storage/synth.metadata | 24 +- handwritten/bigquery-storage/synth.py | 4 +- .../system-test/fixtures/sample/src/index.js | 4 +- .../system-test/fixtures/sample/src/index.ts | 5 +- .../test/gapic-big_query_read-v1.ts | 288 ++ .../bigquery-storage/webpack.config.js | 4 +- 19 files changed, 7459 insertions(+), 23 deletions(-) create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto create mode 100644 
handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client.ts create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json create mode 100644 handwritten/bigquery-storage/src/v1/index.ts create mode 100644 handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto new file mode 100644 index 00000000000..90add9780e1 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -0,0 +1,45 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "ArrowProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Arrow schema as specified in +// https://arrow.apache.org/docs/python/api/datatypes.html +// and serialized to bytes using IPC: +// https://arrow.apache.org/docs/ipc.html. +// +// See code samples on how this message can be deserialized. +message ArrowSchema { + // IPC serialized Arrow schema. + bytes serialized_schema = 1; +} + +// Arrow RecordBatch. +message ArrowRecordBatch { + // IPC-serialized Arrow RecordBatch. + bytes serialized_record_batch = 1; + + // The count of rows in `serialized_record_batch`. + int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto new file mode 100644 index 00000000000..9a064447b9f --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "AvroProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Avro schema. +message AvroSchema { + // Json serialized schema, as described at + // https://avro.apache.org/docs/1.8.1/spec.html. + string schema = 1; +} + +// Avro rows. +message AvroRows { + // Binary serialized rows in a block. + bytes serialized_binary_rows = 1; + + // The count of rows in the returning block. + int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto new file mode 100644 index 00000000000..26fcd6ac25c --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -0,0 +1,236 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1/arrow.proto"; +import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/cloud/bigquery/storage/v1/stream.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "StorageProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; +option (google.api.resource_definition) = { + type: "bigquery.googleapis.com/Table" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}" +}; + +// BigQuery Read API. +// +// The Read API can be used to read data from BigQuery. +service BigQueryRead { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.readonly," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a new read session. A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. + // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Data is assigned to each stream such that roughly the same number of + // rows can be read from each stream. 
Because the server-side unit for + // assigning data is collections of rows, the API does not guarantee that + // each stream will return the same number or rows. Additionally, the + // limits are enforced based on the number of pre-filtered rows, so some + // filters can lead to lopsided assignments. + // + // Read sessions automatically expire 24 hours after they are created and do + // not require manual clean-up by the caller. + rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { + option (google.api.http) = { + post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" + body: "*" + }; + option (google.api.method_signature) = "parent,read_session,max_stream_count"; + } + + // Reads rows from the stream in the format prescribed by the ReadSession. + // Each response contains one or more table rows, up to a maximum of 100 MiB + // per response; read requests which attempt to read individual rows larger + // than 100 MiB will fail. + // + // Each request also returns a set of stream statistics reflecting the current + // state of the stream. + rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { + get: "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" + }; + option (google.api.method_signature) = "read_stream,offset"; + } + + // Splits a given `ReadStream` into two `ReadStream` objects. These + // `ReadStream` objects are referred to as the primary and the residual + // streams of the split. The original `ReadStream` can still be read from in + // the same manner as before. Both of the returned `ReadStream` objects can + // also be read from, and the rows returned by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back-to-back in the + // original `ReadStream`. 
Concretely, it is guaranteed that for streams + // original, primary, and residual, that original[0-j] = primary[0-j] and + // original[j-n] = residual[0-m] once the streams have been read to + // completion. + rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + }; + } +} + +// Request message for `CreateReadSession`. +message CreateReadSessionRequest { + // Required. The request project that owns the session, in the form of + // `projects/{project_id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. Session to be created. + ReadSession read_session = 2 [(google.api.field_behavior) = REQUIRED]; + + // Max initial number of streams. If unset or zero, the server will + // provide a value of streams so as to produce reasonable throughput. Must be + // non-negative. The number of streams may be lower than the requested number, + // depending on the amount parallelism that is reasonable for the table. Error + // will be returned if the max count is greater than the current system + // max limit of 1,000. + // + // Streams must be read starting from offset 0. + int32 max_stream_count = 3; +} + +// Request message for `ReadRows`. +message ReadRowsRequest { + // Required. Stream to read rows from. + string read_stream = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/ReadStream" + } + ]; + + // The offset requested must be less than the last row read from Read. + // Requesting a larger offset is undefined. If not specified, start reading + // from offset zero. + int64 offset = 2; +} + +// Information on if the current connection is being throttled. +message ThrottleState { + // How much this connection is being throttled. 
Zero means no throttling, + // 100 means fully throttled. + int32 throttle_percent = 1; +} + +// Estimated stream statistics for a given Stream. +message StreamStats { + message Progress { + // The fraction of rows assigned to the stream that have been processed by + // the server so far, not including the rows in the current response + // message. + // + // This value, along with `at_response_end`, can be used to interpolate + // the progress made as the rows in the message are being processed using + // the following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the + // `at_response_start` value of the current response. + double at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the + // rows in the current response. + double at_response_end = 2; + } + + // Represents the progress of the current stream. + Progress progress = 2; +} + +// Response from calling `ReadRows` may include row data, progress and +// throttling information. +message ReadRowsResponse { + // Row data is returned in format specified during session creation. + oneof rows { + // Serialized row data in AVRO format. + AvroRows avro_rows = 3; + + // Serialized row data in Arrow RecordBatch format. + ArrowRecordBatch arrow_record_batch = 4; + } + + // Number of serialized rows in the rows block. + int64 row_count = 6; + + // Statistics for the stream. + StreamStats stats = 2; + + // Throttling state. If unset, the latest response still describes + // the current throttling status. + ThrottleState throttle_state = 5; +} + +// Request message for `SplitReadStream`. +message SplitReadStreamRequest { + // Required. Name of the stream to split. 
+ string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/ReadStream" + } + ]; + + // A value in the range (0.0, 1.0) that specifies the fractional point at + // which the original stream should be split. The actual split point is + // evaluated on pre-filtered rows, so if a filter is provided, then there is + // no guarantee that the division of the rows between the new child streams + // will be proportional to this fractional value. Additionally, because the + // server-side unit for assigning data is collections of rows, this fraction + // will always map to a data storage boundary on the server side. + double fraction = 2; +} + +// Response message for `SplitReadStream`. +message SplitReadStreamResponse { + // Primary stream, which contains the beginning portion of + // |original_stream|. An empty value indicates that the original stream can no + // longer be split. + ReadStream primary_stream = 1; + + // Remainder stream, which contains the tail of |original_stream|. An empty + // value indicates that the original stream can no longer be split. + ReadStream remainder_stream = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto new file mode 100644 index 00000000000..19d4231da5b --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -0,0 +1,137 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1/arrow.proto"; +import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "StreamProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Data format for input or output data. +enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0; + + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. + AVRO = 1; + + // Arrow is a standard open source column-based message format. + // See https://arrow.apache.org/ for more details. + ARROW = 2; +} + +// Information about the ReadSession. +message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + + // Additional attributes when reading a table. + message TableModifiers { + // The snapshot time of the table. If not set, interpreted as now. + google.protobuf.Timestamp snapshot_time = 1; + } + + // Options dictating how we read a table. 
+ message TableReadOptions { + // Names of the fields in the table that should be read. If empty, all + // fields will be read. If the specified field is a nested field, all + // the sub-fields in the field will be selected. The output field order is + // unrelated to the order of fields in selected_fields. + repeated string selected_fields = 1; + + // SQL text filtering statement, similar to a WHERE clause in a query. + // Aggregates are not supported. + // + // Examples: "int_field > 5" + // "date_field = CAST('2014-9-27' as DATE)" + // "nullable_field is not NULL" + // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // "numeric_field BETWEEN 1.0 AND 5.0" + string row_restriction = 2; + } + + // Output only. Unique identifier for the session, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. The expire_time is + // automatically assigned and currently cannot be specified or updated. + google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Data format of the output data. + DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. + oneof schema { + // Output only. Avro schema. + AvroSchema avro_schema = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Arrow schema. + ArrowSchema arrow_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + } + + // Immutable. 
Table that this ReadSession is reading from, in the form + // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id} + string table = 6 [ + (google.api.field_behavior) = IMMUTABLE, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } + ]; + + // Optional. Any modifiers which are applied when reading from the specified table. + TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Read options for this session (e.g. column selection, filters). + TableReadOptions read_options = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. A list of streams created with the session. + // + // At least one stream is created with the session. In the future, larger + // request_stream_count values *may* result in this list being unpopulated, + // in that case, the user will need to use a List method to get the streams + // instead, which is not yet available. + repeated ReadStream streams = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Information about a single stream that gets data out of the storage system. +// Most of the information about `ReadStream` instances is aggregated, making +// `ReadStream` lightweight. +message ReadStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadStream" + pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + }; + + // Output only. Name of the stream, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index a0b708af703..33f0b578737 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -26,6 +26,1682 @@ export namespace google { /** Namespace storage. 
*/ namespace storage { + /** Namespace v1. */ + namespace v1 { + + /** Properties of an ArrowSchema. */ + interface IArrowSchema { + + /** ArrowSchema serializedSchema */ + serializedSchema?: (Uint8Array|string|null); + } + + /** Represents an ArrowSchema. */ + class ArrowSchema implements IArrowSchema { + + /** + * Constructs a new ArrowSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSchema); + + /** ArrowSchema serializedSchema. */ + public serializedSchema: (Uint8Array|string); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSchema): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Verifies an ArrowSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @param message ArrowSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an ArrowRecordBatch. 
*/ + interface IArrowRecordBatch { + + /** ArrowRecordBatch serializedRecordBatch */ + serializedRecordBatch?: (Uint8Array|string|null); + + /** ArrowRecordBatch rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an ArrowRecordBatch. */ + class ArrowRecordBatch implements IArrowRecordBatch { + + /** + * Constructs a new ArrowRecordBatch. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch); + + /** ArrowRecordBatch serializedRecordBatch. */ + public serializedRecordBatch: (Uint8Array|string); + + /** ArrowRecordBatch rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowRecordBatch instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Verifies an ArrowRecordBatch message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowRecordBatch + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * @param message ArrowRecordBatch + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowRecordBatch to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an AvroSchema. 
*/ + interface IAvroSchema { + + /** AvroSchema schema */ + schema?: (string|null); + } + + /** Represents an AvroSchema. */ + class AvroSchema implements IAvroSchema { + + /** + * Constructs a new AvroSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSchema); + + /** AvroSchema schema. */ + public schema: string; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSchema): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Verifies an AvroSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @param message AvroSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an AvroRows. 
*/ + interface IAvroRows { + + /** AvroRows serializedBinaryRows */ + serializedBinaryRows?: (Uint8Array|string|null); + + /** AvroRows rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an AvroRows. */ + class AvroRows implements IAvroRows { + + /** + * Constructs a new AvroRows. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroRows); + + /** AvroRows serializedBinaryRows. */ + public serializedBinaryRows: (Uint8Array|string); + + /** AvroRows rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new AvroRows instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroRows): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroRows message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Verifies an AvroRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. + * @param message AvroRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Represents a BigQueryRead */ + class BigQueryRead extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryRead service. 
+ * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryRead service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryRead; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadSession + */ + public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback): void; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @returns Promise + */ + public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): Promise; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + */ + public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback): void; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @returns Promise + */ + public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest): Promise; + + /** + * Calls SplitReadStream. 
+ * @param request SplitReadStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback): void; + + /** + * Calls SplitReadStream. + * @param request SplitReadStreamRequest message or plain object + * @returns Promise + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): Promise; + } + + namespace BigQueryRead { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#createReadSession}. + * @param error Error, if any + * @param [response] ReadSession + */ + type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadSession) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#readRows}. + * @param error Error, if any + * @param [response] ReadRowsResponse + */ + type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#splitReadStream}. + * @param error Error, if any + * @param [response] SplitReadStreamResponse + */ + type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.SplitReadStreamResponse) => void; + } + + /** Properties of a CreateReadSessionRequest. */ + interface ICreateReadSessionRequest { + + /** CreateReadSessionRequest parent */ + parent?: (string|null); + + /** CreateReadSessionRequest readSession */ + readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); + + /** CreateReadSessionRequest maxStreamCount */ + maxStreamCount?: (number|null); + } + + /** Represents a CreateReadSessionRequest. 
*/ + class CreateReadSessionRequest implements ICreateReadSessionRequest { + + /** + * Constructs a new CreateReadSessionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest); + + /** CreateReadSessionRequest parent. */ + public parent: string; + + /** CreateReadSessionRequest readSession. */ + public readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); + + /** CreateReadSessionRequest maxStreamCount. */ + public maxStreamCount: number; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateReadSessionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Verifies a CreateReadSessionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateReadSessionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @param message CreateReadSessionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateReadSessionRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReadRowsRequest. */ + interface IReadRowsRequest { + + /** ReadRowsRequest readStream */ + readStream?: (string|null); + + /** ReadRowsRequest offset */ + offset?: (number|Long|string|null); + } + + /** Represents a ReadRowsRequest. */ + class ReadRowsRequest implements IReadRowsRequest { + + /** + * Constructs a new ReadRowsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest); + + /** ReadRowsRequest readStream. */ + public readStream: string; + + /** ReadRowsRequest offset. */ + public offset: (number|Long|string); + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Verifies a ReadRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @param message ReadRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ThrottleState. 
*/ + interface IThrottleState { + + /** ThrottleState throttlePercent */ + throttlePercent?: (number|null); + } + + /** Represents a ThrottleState. */ + class ThrottleState implements IThrottleState { + + /** + * Constructs a new ThrottleState. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IThrottleState); + + /** ThrottleState throttlePercent. */ + public throttlePercent: number; + + /** + * Creates a new ThrottleState instance using the specified properties. + * @param [properties] Properties to set + * @returns ThrottleState instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IThrottleState): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @param message ThrottleState message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @param message ThrottleState message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ThrottleState message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Decodes a ThrottleState message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Verifies a ThrottleState message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ThrottleState + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Creates a plain object from a ThrottleState message. Also converts values to other types if specified. + * @param message ThrottleState + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ThrottleState, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ThrottleState to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a StreamStats. 
*/ + interface IStreamStats { + + /** StreamStats progress */ + progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); + } + + /** Represents a StreamStats. */ + class StreamStats implements IStreamStats { + + /** + * Constructs a new StreamStats. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IStreamStats); + + /** StreamStats progress. */ + public progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); + + /** + * Creates a new StreamStats instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamStats instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IStreamStats): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @param message StreamStats message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamStats message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @param message StreamStats message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamStats message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Decodes a StreamStats message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Verifies a StreamStats message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamStats + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Creates a plain object from a StreamStats message. Also converts values to other types if specified. + * @param message StreamStats + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamStats to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace StreamStats { + + /** Properties of a Progress. 
*/ + interface IProgress { + + /** Progress atResponseStart */ + atResponseStart?: (number|null); + + /** Progress atResponseEnd */ + atResponseEnd?: (number|null); + } + + /** Represents a Progress. */ + class Progress implements IProgress { + + /** + * Constructs a new Progress. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress); + + /** Progress atResponseStart. */ + public atResponseStart: number; + + /** Progress atResponseEnd. */ + public atResponseEnd: number; + + /** + * Creates a new Progress instance using the specified properties. + * @param [properties] Properties to set + * @returns Progress instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Progress message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Verifies a Progress message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Progress + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @param message Progress + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Progress to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a ReadRowsResponse. 
*/ + interface IReadRowsResponse { + + /** ReadRowsResponse avroRows */ + avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch */ + arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount */ + rowCount?: (number|Long|string|null); + + /** ReadRowsResponse stats */ + stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); + + /** ReadRowsResponse throttleState */ + throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + } + + /** Represents a ReadRowsResponse. */ + class ReadRowsResponse implements IReadRowsResponse { + + /** + * Constructs a new ReadRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse); + + /** ReadRowsResponse avroRows. */ + public avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch. */ + public arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount. */ + public rowCount: (number|Long|string); + + /** ReadRowsResponse stats. */ + public stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); + + /** ReadRowsResponse throttleState. */ + public throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + + /** ReadRowsResponse rows. */ + public rows?: ("avroRows"|"arrowRecordBatch"); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
+ * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Verifies a ReadRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsResponse message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @param message ReadRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamRequest. */ + interface ISplitReadStreamRequest { + + /** SplitReadStreamRequest name */ + name?: (string|null); + + /** SplitReadStreamRequest fraction */ + fraction?: (number|null); + } + + /** Represents a SplitReadStreamRequest. */ + class SplitReadStreamRequest implements ISplitReadStreamRequest { + + /** + * Constructs a new SplitReadStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest); + + /** SplitReadStreamRequest name. */ + public name: string; + + /** SplitReadStreamRequest fraction. */ + public fraction: number; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
+ * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Verifies a SplitReadStreamRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * @param message SplitReadStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamResponse. */ + interface ISplitReadStreamResponse { + + /** SplitReadStreamResponse primaryStream */ + primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** SplitReadStreamResponse remainderStream */ + remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + } + + /** Represents a SplitReadStreamResponse. */ + class SplitReadStreamResponse implements ISplitReadStreamResponse { + + /** + * Constructs a new SplitReadStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse); + + /** SplitReadStreamResponse primaryStream. */ + public primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** SplitReadStreamResponse remainderStream. 
*/ + public remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Verifies a SplitReadStreamResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * @param message SplitReadStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 2 + } + + /** Properties of a ReadSession. 
*/ + interface IReadSession { + + /** ReadSession name */ + name?: (string|null); + + /** ReadSession expireTime */ + expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession dataFormat */ + dataFormat?: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat|null); + + /** ReadSession avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadSession arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadSession table */ + table?: (string|null); + + /** ReadSession tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); + + /** ReadSession readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); + } + + /** Represents a ReadSession. */ + class ReadSession implements IReadSession { + + /** + * Constructs a new ReadSession. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadSession); + + /** ReadSession name. */ + public name: string; + + /** ReadSession expireTime. */ + public expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession dataFormat. */ + public dataFormat: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat); + + /** ReadSession avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadSession arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadSession table. */ + public table: string; + + /** ReadSession tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); + + /** ReadSession readOptions. 
*/ + public readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams. */ + public streams: google.cloud.bigquery.storage.v1.IReadStream[]; + + /** ReadSession schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + + /** + * Creates a new ReadSession instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadSession instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadSession): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadSession message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Verifies a ReadSession message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadSession + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @param message ReadSession + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadSession to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace ReadSession { + + /** Properties of a TableModifiers. */ + interface ITableModifiers { + + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { + + /** + * Constructs a new TableModifiers. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers); + + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new TableModifiers instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableModifiers to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableReadOptions. */ + interface ITableReadOptions { + + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); + } + + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { + + /** + * Constructs a new TableReadOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions); + + /** TableReadOptions selectedFields. 
*/ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. */ + public rowRestriction: string; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReadOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Verifies a TableReadOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReadOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * @param message TableReadOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReadOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a ReadStream. */ + interface IReadStream { + + /** ReadStream name */ + name?: (string|null); + } + + /** Represents a ReadStream. */ + class ReadStream implements IReadStream { + + /** + * Constructs a new ReadStream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadStream); + + /** ReadStream name. */ + public name: string; + + /** + * Creates a new ReadStream instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ReadStream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadStream): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Verifies a ReadStream message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadStream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @param message ReadStream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadStream to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + /** Namespace v1beta1. */ namespace v1beta1 { diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 8da21884529..911efd69b28 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -66,6 +66,3895 @@ */ var storage = {}; + storage.v1 = (function() { + + /** + * Namespace v1. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1 = {}; + + v1.ArrowSchema = (function() { + + /** + * Properties of an ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowSchema + * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema + */ + + /** + * Constructs a new ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowSchema. 
+ * @implements IArrowSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set + */ + function ArrowSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSchema serializedSchema. + * @member {Uint8Array} serializedSchema + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @instance + */ + ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema instance + */ + ArrowSchema.create = function create(properties) { + return new ArrowSchema(properties); + }; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); + return writer; + }; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedSchema = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) + return "serializedSchema: buffer expected"; + return null; + }; + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + */ + ArrowSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); + if (object.serializedSchema != null) + if (typeof object.serializedSchema === "string") + $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); + else if (object.serializedSchema.length) + message.serializedSchema = object.serializedSchema; + return message; + }; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowSchema} message ArrowSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if (options.bytes === String) + object.serializedSchema = ""; + else { + object.serializedSchema = []; + if (options.bytes !== Array) + object.serializedSchema = $util.newBuffer(object.serializedSchema); + } + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; + return object; + }; + + /** + * Converts this ArrowSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @instance + * @returns {Object.} JSON object + */ + ArrowSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ArrowSchema; + })(); + + v1.ArrowRecordBatch = (function() { + + /** + * Properties of an ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowRecordBatch + * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch + * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount + */ + + /** + * Constructs a new ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowRecordBatch. + * @implements IArrowRecordBatch + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set + */ + function ArrowRecordBatch(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowRecordBatch serializedRecordBatch. + * @member {Uint8Array} serializedRecordBatch + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); + + /** + * ArrowRecordBatch rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch instance + */ + ArrowRecordBatch.create = function create(properties) { + return new ArrowRecordBatch(properties); + }; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedRecordBatch = reader.bytes(); + break; + case 2: + message.rowCount = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowRecordBatch message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowRecordBatch.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) + return "serializedRecordBatch: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + */ + ArrowRecordBatch.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); + if (object.serializedRecordBatch != null) + if (typeof object.serializedRecordBatch === "string") + $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); + else if (object.serializedRecordBatch.length) + message.serializedRecordBatch = object.serializedRecordBatch; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowRecordBatch} message ArrowRecordBatch + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowRecordBatch.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedRecordBatch = ""; + else { + object.serializedRecordBatch = []; + if (options.bytes !== Array) + object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ArrowRecordBatch to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + * @returns {Object.} JSON object + */ + ArrowRecordBatch.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ArrowRecordBatch; + })(); + + v1.AvroSchema = (function() { + + /** + * Properties of an AvroSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroSchema + * @property {string|null} [schema] AvroSchema schema + */ + + /** + * Constructs a new AvroSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroSchema. + * @implements IAvroSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set + */ + function AvroSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSchema schema. + * @member {string} schema + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @instance + */ + AvroSchema.prototype.schema = ""; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema instance + */ + AvroSchema.create = function create(properties) { + return new AvroSchema(properties); + }; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.schema != null && message.hasOwnProperty("schema")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); + return writer; + }; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.schema = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.schema != null && message.hasOwnProperty("schema")) + if (!$util.isString(message.schema)) + return "schema: string expected"; + return null; + }; + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + */ + AvroSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); + if (object.schema != null) + message.schema = String(object.schema); + return message; + }; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.AvroSchema} message AvroSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.schema = ""; + if (message.schema != null && message.hasOwnProperty("schema")) + object.schema = message.schema; + return object; + }; + + /** + * Converts this AvroSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @instance + * @returns {Object.} JSON object + */ + AvroSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return AvroSchema; + })(); + + v1.AvroRows = (function() { + + /** + * Properties of an AvroRows. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroRows + * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows + * @property {number|Long|null} [rowCount] AvroRows rowCount + */ + + /** + * Constructs a new AvroRows. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroRows. 
+ * @implements IAvroRows + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set + */ + function AvroRows(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroRows serializedBinaryRows. + * @member {Uint8Array} serializedBinaryRows + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + */ + AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); + + /** + * AvroRows rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + */ + AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new AvroRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows instance + */ + AvroRows.create = function create(properties) { + return new AvroRows(properties); + }; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.serializedBinaryRows = reader.bytes(); + break; + case 2: + message.rowCount = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroRows message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) + return "serializedBinaryRows: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + */ + AvroRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); + if (object.serializedBinaryRows != null) + if (typeof object.serializedBinaryRows === "string") + $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); + else if (object.serializedBinaryRows.length) + message.serializedBinaryRows = object.serializedBinaryRows; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.AvroRows} message AvroRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedBinaryRows = ""; + else { + object.serializedBinaryRows = []; + if (options.bytes !== Array) + object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this AvroRows to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + * @returns {Object.} JSON object + */ + AvroRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return AvroRows; + })(); + + v1.BigQueryRead = (function() { + + /** + * Constructs a new BigQueryRead service. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BigQueryRead + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryRead(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryRead.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryRead; + + /** + * Creates new BigQueryRead service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryRead} RPC service. Useful where requests and/or responses are streamed. + */ + BigQueryRead.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#createReadSession}. 
+ * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef CreateReadSessionCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.ReadSession} [response] ReadSession + */ + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.createReadSession = function createReadSession(request, callback) { + return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1.ReadSession, request, callback); + }, "name", { value: "CreateReadSession" }); + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#readRows}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef ReadRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} [response] ReadRowsResponse + */ + + /** + * Calls ReadRows. 
+ * @function readRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.readRows = function readRows(request, callback) { + return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1.ReadRowsResponse, request, callback); + }, "name", { value: "ReadRows" }); + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#splitReadStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef SplitReadStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} [response] SplitReadStreamResponse + */ + + /** + * Calls SplitReadStream. 
+ * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.splitReadStream = function splitReadStream(request, callback) { + return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse, request, callback); + }, "name", { value: "SplitReadStream" }); + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryRead; + })(); + + v1.CreateReadSessionRequest = (function() { + + /** + * Properties of a CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ICreateReadSessionRequest + * @property {string|null} [parent] CreateReadSessionRequest parent + * @property {google.cloud.bigquery.storage.v1.IReadSession|null} [readSession] CreateReadSessionRequest readSession + * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount + */ + + /** + * Constructs a new CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a CreateReadSessionRequest. 
+ * @implements ICreateReadSessionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set + */ + function CreateReadSessionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateReadSessionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.parent = ""; + + /** + * CreateReadSessionRequest readSession. + * @member {google.cloud.bigquery.storage.v1.IReadSession|null|undefined} readSession + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.readSession = null; + + /** + * CreateReadSessionRequest maxStreamCount. + * @member {number} maxStreamCount + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.maxStreamCount = 0; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest instance + */ + CreateReadSessionRequest.create = function create(properties) { + return new CreateReadSessionRequest(properties); + }; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && message.hasOwnProperty("parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.readSession != null && message.hasOwnProperty("readSession")) + $root.google.cloud.bigquery.storage.v1.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount); + return writer; + }; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.parent = reader.string(); + break; + case 2: + message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.decode(reader, reader.uint32()); + break; + case 3: + message.maxStreamCount = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateReadSessionRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateReadSessionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.readSession != null && message.hasOwnProperty("readSession")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.verify(message.readSession); + if (error) + return "readSession." + error; + } + if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + if (!$util.isInteger(message.maxStreamCount)) + return "maxStreamCount: integer expected"; + return null; + }; + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + */ + CreateReadSessionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.readSession != null) { + if (typeof object.readSession !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.CreateReadSessionRequest.readSession: object expected"); + message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.fromObject(object.readSession); + } + if (object.maxStreamCount != null) + message.maxStreamCount = object.maxStreamCount | 0; + return message; + }; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} message CreateReadSessionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateReadSessionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.readSession = null; + object.maxStreamCount = 0; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.readSession != null && message.hasOwnProperty("readSession")) + object.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.toObject(message.readSession, options); + if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + object.maxStreamCount = message.maxStreamCount; + return object; + }; + + /** + * Converts this CreateReadSessionRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateReadSessionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return CreateReadSessionRequest; + })(); + + v1.ReadRowsRequest = (function() { + + /** + * Properties of a ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadRowsRequest + * @property {string|null} [readStream] ReadRowsRequest readStream + * @property {number|Long|null} [offset] ReadRowsRequest offset + */ + + /** + * Constructs a new ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadRowsRequest. 
+ * @implements IReadRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set + */ + function ReadRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsRequest readStream. + * @member {string} readStream + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.readStream = ""; + + /** + * ReadRowsRequest offset. + * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest instance + */ + ReadRowsRequest.create = function create(properties) { + return new ReadRowsRequest(properties); + }; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.readStream != null && message.hasOwnProperty("readStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.readStream); + if (message.offset != null && message.hasOwnProperty("offset")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); + return writer; + }; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.readStream = reader.string(); + break; + case 2: + message.offset = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.readStream != null && message.hasOwnProperty("readStream")) + if (!$util.isString(message.readStream)) + return "readStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + */ + ReadRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); + if (object.readStream != null) + message.readStream = String(object.readStream); + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a ReadRowsRequest message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ReadRowsRequest} message ReadRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.readStream = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; + } + if (message.readStream != null && message.hasOwnProperty("readStream")) + object.readStream = message.readStream; + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; + + /** + * Converts this ReadRowsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + * @returns {Object.} JSON object + */ + ReadRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsRequest; + })(); + + v1.ThrottleState = (function() { + + /** + * Properties of a ThrottleState. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IThrottleState + * @property {number|null} [throttlePercent] ThrottleState throttlePercent + */ + + /** + * Constructs a new ThrottleState. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ThrottleState. + * @implements IThrottleState + * @constructor + * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set + */ + function ThrottleState(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ThrottleState throttlePercent. + * @member {number} throttlePercent + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @instance + */ + ThrottleState.prototype.throttlePercent = 0; + + /** + * Creates a new ThrottleState instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState instance + */ + ThrottleState.create = function create(properties) { + return new ThrottleState(properties); + }; + + /** + * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleState.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + return writer; + }; + + /** + * Encodes the specified ThrottleState message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleState.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ThrottleState message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleState.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.throttlePercent = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ThrottleState message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleState.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ThrottleState message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ThrottleState.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (!$util.isInteger(message.throttlePercent)) + return "throttlePercent: integer expected"; + return null; + }; + + /** + * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + */ + ThrottleState.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ThrottleState) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); + if (object.throttlePercent != null) + message.throttlePercent = object.throttlePercent | 0; + return message; + }; + + /** + * Creates a plain object from a ThrottleState message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.ThrottleState} message ThrottleState + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ThrottleState.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.throttlePercent = 0; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + object.throttlePercent = message.throttlePercent; + return object; + }; + + /** + * Converts this ThrottleState to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @instance + * @returns {Object.} JSON object + */ + ThrottleState.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ThrottleState; + })(); + + v1.StreamStats = (function() { + + /** + * Properties of a StreamStats. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IStreamStats + * @property {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null} [progress] StreamStats progress + */ + + /** + * Constructs a new StreamStats. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a StreamStats. + * @implements IStreamStats + * @constructor + * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set + */ + function StreamStats(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamStats progress. 
+ * @member {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null|undefined} progress + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @instance + */ + StreamStats.prototype.progress = null; + + /** + * Creates a new StreamStats instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats instance + */ + StreamStats.create = function create(properties) { + return new StreamStats(properties); + }; + + /** + * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStats.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.progress != null && message.hasOwnProperty("progress")) + $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.encode(message.progress, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamStats message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStats.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamStats message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStats.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamStats message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStats.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamStats message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamStats.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.progress != null && message.hasOwnProperty("progress")) { + var error = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.verify(message.progress); + if (error) + return "progress." + error; + } + return null; + }; + + /** + * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + */ + StreamStats.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); + if (object.progress != null) { + if (typeof object.progress !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.StreamStats.progress: object expected"); + message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.fromObject(object.progress); + } + return message; + }; + + /** + * Creates a plain object from a StreamStats message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats} message StreamStats + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamStats.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.progress = null; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.toObject(message.progress, options); + return object; + }; + + /** + * Converts this StreamStats to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @instance + * @returns {Object.} JSON object + */ + StreamStats.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + StreamStats.Progress = (function() { + + /** + * Properties of a Progress. 
+ * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @interface IProgress + * @property {number|null} [atResponseStart] Progress atResponseStart + * @property {number|null} [atResponseEnd] Progress atResponseEnd + */ + + /** + * Constructs a new Progress. + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @classdesc Represents a Progress. + * @implements IProgress + * @constructor + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set + */ + function Progress(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Progress atResponseStart. + * @member {number} atResponseStart + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + */ + Progress.prototype.atResponseStart = 0; + + /** + * Progress atResponseEnd. + * @member {number} atResponseEnd + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + */ + Progress.prototype.atResponseEnd = 0; + + /** + * Creates a new Progress instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress instance + */ + Progress.create = function create(properties) { + return new Progress(properties); + }; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.atResponseStart); + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.atResponseEnd); + return writer; + }; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Progress message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.atResponseStart = reader.double(); + break; + case 2: + message.atResponseEnd = reader.double(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Progress message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Progress.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (typeof message.atResponseStart !== "number") + return "atResponseStart: number expected"; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (typeof message.atResponseEnd !== "number") + return "atResponseEnd: number expected"; + return null; + }; + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + */ + Progress.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats.Progress) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); + if (object.atResponseStart != null) + message.atResponseStart = Number(object.atResponseStart); + if (object.atResponseEnd != null) + message.atResponseEnd = Number(object.atResponseEnd); + return message; + }; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.Progress} message Progress + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Progress.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.atResponseStart = 0; + object.atResponseEnd = 0; + } + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + return object; + }; + + /** + * Converts this Progress to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + * @returns {Object.} JSON object + */ + Progress.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Progress; + })(); + + return StreamStats; + })(); + + v1.ReadRowsResponse = (function() { + + /** + * Properties of a ReadRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadRowsResponse + * @property {google.cloud.bigquery.storage.v1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows + * @property {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch + * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount + * @property {google.cloud.bigquery.storage.v1.IStreamStats|null} [stats] ReadRowsResponse stats + * @property {google.cloud.bigquery.storage.v1.IThrottleState|null} [throttleState] ReadRowsResponse throttleState + */ + + /** + * Constructs a new ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadRowsResponse. + * @implements IReadRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set + */ + function ReadRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsResponse avroRows. + * @member {google.cloud.bigquery.storage.v1.IAvroRows|null|undefined} avroRows + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroRows = null; + + /** + * ReadRowsResponse arrowRecordBatch. + * @member {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null|undefined} arrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowRecordBatch = null; + + /** + * ReadRowsResponse rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadRowsResponse stats. 
+ * @member {google.cloud.bigquery.storage.v1.IStreamStats|null|undefined} stats + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.stats = null; + + /** + * ReadRowsResponse throttleState. + * @member {google.cloud.bigquery.storage.v1.IThrottleState|null|undefined} throttleState + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.throttleState = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadRowsResponse rows. + * @member {"avroRows"|"arrowRecordBatch"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse instance + */ + ReadRowsResponse.create = function create(properties) { + return new ReadRowsResponse(properties); + }; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stats != null && message.hasOwnProperty("stats")) + $root.google.cloud.bigquery.storage.v1.StreamStats.encode(message.stats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) + $root.google.cloud.bigquery.storage.v1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) + $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.throttleState != null && message.hasOwnProperty("throttleState")) + $root.google.cloud.bigquery.storage.v1.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: + message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); + break; + case 4: + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + case 6: + message.rowCount = reader.int64(); + break; + case 2: + message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.decode(reader, reader.uint32()); + break; + case 5: + message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroRows.verify(message.avroRows); + if (error) + return "avroRows." + error; + } + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify(message.arrowRecordBatch); + if (error) + return "arrowRecordBatch." + error; + } + } + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + if (message.stats != null && message.hasOwnProperty("stats")) { + var error = $root.google.cloud.bigquery.storage.v1.StreamStats.verify(message.stats); + if (error) + return "stats." + error; + } + if (message.throttleState != null && message.hasOwnProperty("throttleState")) { + var error = $root.google.cloud.bigquery.storage.v1.ThrottleState.verify(message.throttleState); + if (error) + return "throttleState." + error; + } + return null; + }; + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + */ + ReadRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); + if (object.avroRows != null) { + if (typeof object.avroRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroRows: object expected"); + message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.fromObject(object.avroRows); + } + if (object.arrowRecordBatch != null) { + if (typeof object.arrowRecordBatch !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowRecordBatch: object expected"); + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); + } + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + if (object.stats != null) { + if (typeof object.stats !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.stats: object expected"); + message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.fromObject(object.stats); + } + if (object.throttleState != null) { + if (typeof object.throttleState !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.throttleState: object expected"); + 
message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.fromObject(object.throttleState); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} message ReadRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.stats = null; + object.throttleState = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.stats != null && message.hasOwnProperty("stats")) + object.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.toObject(message.stats, options); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + object.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.toObject(message.avroRows, options); + if (options.oneofs) + object.rows = "avroRows"; + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); + if (options.oneofs) + object.rows = "arrowRecordBatch"; + } + if (message.throttleState != null && message.hasOwnProperty("throttleState")) + object.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.toObject(message.throttleState, options); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + 
object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ReadRowsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + * @returns {Object.} JSON object + */ + ReadRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsResponse; + })(); + + v1.SplitReadStreamRequest = (function() { + + /** + * Properties of a SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ISplitReadStreamRequest + * @property {string|null} [name] SplitReadStreamRequest name + * @property {number|null} [fraction] SplitReadStreamRequest fraction + */ + + /** + * Constructs a new SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a SplitReadStreamRequest. + * @implements ISplitReadStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set + */ + function SplitReadStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamRequest name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.name = ""; + + /** + * SplitReadStreamRequest fraction. 
+ * @member {number} fraction + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.fraction = 0; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest instance + */ + SplitReadStreamRequest.create = function create(properties) { + return new SplitReadStreamRequest(properties); + }; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.fraction != null && message.hasOwnProperty("fraction")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.fraction); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.fraction = reader.double(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.fraction != null && message.hasOwnProperty("fraction")) + if (typeof message.fraction !== "number") + return "fraction: number expected"; + return null; + }; + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + */ + SplitReadStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); + if (object.name != null) + message.name = String(object.name); + if (object.fraction != null) + message.fraction = Number(object.fraction); + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.fraction = 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.fraction != null && message.hasOwnProperty("fraction")) + object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + return object; + }; + + /** + * Converts this SplitReadStreamRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamRequest; + })(); + + v1.SplitReadStreamResponse = (function() { + + /** + * Properties of a SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ISplitReadStreamResponse + * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [primaryStream] SplitReadStreamResponse primaryStream + * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [remainderStream] SplitReadStreamResponse remainderStream + */ + + /** + * Constructs a new SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a SplitReadStreamResponse. + * @implements ISplitReadStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set + */ + function SplitReadStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamResponse primaryStream. + * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} primaryStream + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.primaryStream = null; + + /** + * SplitReadStreamResponse remainderStream. + * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} remainderStream + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.remainderStream = null; + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse instance + */ + SplitReadStreamResponse.create = function create(properties) { + return new SplitReadStreamResponse(properties); + }; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + case 2: + message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.primaryStream); + if (error) + return "primaryStream." + error; + } + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.remainderStream); + if (error) + return "remainderStream." + error; + } + return null; + }; + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + */ + SplitReadStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); + if (object.primaryStream != null) { + if (typeof object.primaryStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.primaryStream: object expected"); + message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.primaryStream); + } + if (object.remainderStream != null) { + if (typeof object.remainderStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.remainderStream: object expected"); + message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.remainderStream); + } + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.primaryStream = null; + object.remainderStream = null; + } + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + object.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.primaryStream, options); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + object.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.remainderStream, options); + return object; + }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamResponse; + })(); + + /** + * DataFormat enum. + * @name google.cloud.bigquery.storage.v1.DataFormat + * @enum {string} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=2 ARROW value + */ + v1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[2] = "ARROW"] = 2; + return values; + })(); + + v1.ReadSession = (function() { + + /** + * Properties of a ReadSession. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1.DataFormat|null} [dataFormat] ReadSession dataFormat + * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {string|null} [table] ReadSession table + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions + * @property {Array.|null} [streams] ReadSession streams + */ + + /** + * Constructs a new ReadSession. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadSession. + * @implements IReadSession + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + */ + function ReadSession(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.name = ""; + + /** + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.expireTime = null; + + /** + * ReadSession dataFormat. 
+ * @member {google.cloud.bigquery.storage.v1.DataFormat} dataFormat + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.dataFormat = 0; + + /** + * ReadSession avroSchema. + * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession table. + * @member {string} table + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.table = ""; + + /** + * ReadSession tableModifiers. + * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession readOptions. + * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.readOptions = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. 
+ * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession instance + */ + ReadSession.create = function create(properties) { + return new ReadSession(properties); + }; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) + $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if 
(message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.table != null && message.hasOwnProperty("table")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 3: + message.dataFormat = reader.int32(); + break; + case 4: + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + case 5: + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + case 6: + message.table = reader.string(); + break; + case 7: + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); + break; + case 8: + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); + break; + case 10: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadSession message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadSession.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); + if (error) + return "expireTime." + error; + } + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + switch (message.dataFormat) { + default: + return "dataFormat: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." 
+ error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + if (message.table != null && message.hasOwnProperty("table")) + if (!$util.isString(message.table)) + return "table: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + */ + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.expireTime: object expected"); + message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); + } + switch (object.dataFormat) { + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.dataFormat = 0; + break; + case "AVRO": + case 1: + message.dataFormat = 1; + break; + case "ARROW": + case 2: + message.dataFormat = 2; + break; + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.table != null) + message.table = String(object.table); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.fromObject(object.tableModifiers); + } + if (object.readOptions != null) 
{ + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.fromObject(object.readOptions); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession} message ReadSession + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadSession.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (options.defaults) { + object.name = ""; + object.expireTime = null; + object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; + object.table = ""; + object.tableModifiers = null; + object.readOptions = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + object.dataFormat = options.enums === String ? 
$root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.table != null && message.hasOwnProperty("table")) + object.table = message.table; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.toObject(message.tableModifiers, options); + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.toObject(message.readOptions, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this ReadSession to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + * @returns {Object.} JSON object + */ + ReadSession.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + ReadSession.TableModifiers = (function() { + + /** + * Properties of a TableModifiers. 
+ * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ + + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableModifiers. + * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableModifiers snapshotTime. + * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; + + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableModifiers message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; + + /** + * Converts this TableModifiers to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableModifiers; + })(); + + ReadSession.TableReadOptions = (function() { + + /** + * Properties of a TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + */ + + /** + * Constructs a new TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableReadOptions. + * @implements ITableReadOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + */ + function TableReadOptions(properties) { + this.selectedFields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReadOptions selectedFields. 
+ * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.selectedFields = $util.emptyArray; + + /** + * TableReadOptions rowRestriction. + * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions instance + */ + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); + }; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + return writer; + }; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + case 2: + message.rowRestriction = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReadOptions message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReadOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; + return null; + }; + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + */ + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); + } + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); + return message; + }; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} message TableReadOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReadOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; + return object; + }; + + /** + * Converts this TableReadOptions to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + * @returns {Object.} JSON object + */ + TableReadOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableReadOptions; + })(); + + return ReadSession; + })(); + + v1.ReadStream = (function() { + + /** + * Properties of a ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadStream + * @property {string|null} [name] ReadStream name + */ + + /** + * Constructs a new ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadStream. 
+ * @implements IReadStream + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + */ + function ReadStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @instance + */ + ReadStream.prototype.name = ""; + + /** + * Creates a new ReadStream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream instance + */ + ReadStream.create = function create(properties) { + return new ReadStream(properties); + }; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.hasOwnProperty("name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadStream message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + */ + ReadStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.ReadStream} message ReadStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this ReadStream to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @instance + * @returns {Object.} JSON object + */ + ReadStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadStream; + })(); + + return v1; + })(); + storage.v1beta1 = (function() { /** diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 6f3c1689461..b14e1b22555 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -8,6 +8,348 @@ "nested": { "storage": { "nested": { + "v1": { + "options": { + "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", + "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", + "java_multiple_files": true, + "java_outer_classname": "StreamProto", + "java_package": "com.google.cloud.bigquery.storage.v1", + "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1", + "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", + "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" + }, + "nested": { + "ArrowSchema": { + "fields": { + "serializedSchema": { + "type": "bytes", + "id": 1 + } + } + }, + "ArrowRecordBatch": { + "fields": { + "serializedRecordBatch": { + "type": 
"bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "AvroSchema": { + "fields": { + "schema": { + "type": "string", + "id": 1 + } + } + }, + "AvroRows": { + "fields": { + "serializedBinaryRows": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "BigQueryRead": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateReadSession": { + "requestType": "CreateReadSessionRequest", + "responseType": "ReadSession", + "options": { + "(google.api.http).post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "parent,read_session,max_stream_count" + } + }, + "ReadRows": { + "requestType": "ReadRowsRequest", + "responseType": "ReadRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}", + "(google.api.method_signature)": "read_stream,offset" + } + }, + "SplitReadStream": { + "requestType": "SplitReadStreamRequest", + "responseType": "SplitReadStreamResponse", + "options": { + "(google.api.http).get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + } + } + } + }, + "CreateReadSessionRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" + } + }, + "readSession": { + "type": "ReadSession", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "maxStreamCount": { + "type": "int32", + "id": 3 + } + } + }, + "ReadRowsRequest": { + "fields": { + "readStream": { + "type": "string", + "id": 1, + "options": { + 
"(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" + } + }, + "offset": { + "type": "int64", + "id": 2 + } + } + }, + "ThrottleState": { + "fields": { + "throttlePercent": { + "type": "int32", + "id": 1 + } + } + }, + "StreamStats": { + "fields": { + "progress": { + "type": "Progress", + "id": 2 + } + }, + "nested": { + "Progress": { + "fields": { + "atResponseStart": { + "type": "double", + "id": 1 + }, + "atResponseEnd": { + "type": "double", + "id": 2 + } + } + } + } + }, + "ReadRowsResponse": { + "oneofs": { + "rows": { + "oneof": [ + "avroRows", + "arrowRecordBatch" + ] + } + }, + "fields": { + "avroRows": { + "type": "AvroRows", + "id": 3 + }, + "arrowRecordBatch": { + "type": "ArrowRecordBatch", + "id": 4 + }, + "rowCount": { + "type": "int64", + "id": 6 + }, + "stats": { + "type": "StreamStats", + "id": 2 + }, + "throttleState": { + "type": "ThrottleState", + "id": 5 + } + } + }, + "SplitReadStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" + } + }, + "fraction": { + "type": "double", + "id": 2 + } + } + }, + "SplitReadStreamResponse": { + "fields": { + "primaryStream": { + "type": "ReadStream", + "id": 1 + }, + "remainderStream": { + "type": "ReadStream", + "id": 2 + } + } + }, + "DataFormat": { + "values": { + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, + "ARROW": 2 + } + }, + "ReadSession": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" + }, + "oneofs": { + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] + } + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "expireTime": { 
+ "type": "google.protobuf.Timestamp", + "id": 2, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "dataFormat": { + "type": "DataFormat", + "id": 3, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + }, + "avroSchema": { + "type": "AvroSchema", + "id": 4, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 5, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "table": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "IMMUTABLE", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 7, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "readOptions": { + "type": "TableReadOptions", + "id": 8, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "streams": { + "rule": "repeated", + "type": "ReadStream", + "id": 10, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + }, + "nested": { + "TableModifiers": { + "fields": { + "snapshotTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + } + } + }, + "TableReadOptions": { + "fields": { + "selectedFields": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "rowRestriction": { + "type": "string", + "id": 2 + } + } + } + } + }, + "ReadStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + } + } + } + }, "v1beta1": { "options": { "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts 
index a3e89d46812..8716f5a6ad2 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -16,9 +16,13 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** +import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; +const BigQueryReadClient = v1.BigQueryReadClient; const BigQueryStorageClient = v1beta1.BigQueryStorageClient; -export {v1beta1, BigQueryStorageClient}; -export default {v1beta1, BigQueryStorageClient}; +export {v1, BigQueryReadClient, v1beta1, BigQueryStorageClient}; +// For compatibility with JavaScript libraries we need to provide this default export: +// tslint:disable-next-line no-default-export +export default {v1, BigQueryReadClient}; import * as protos from '../protos/protos'; export {protos}; diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts new file mode 100644 index 00000000000..069be0386c0 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -0,0 +1,698 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as gax from 'google-gax'; +import { + APICallback, + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import * as path from 'path'; + +import * as protosTypes from '../../protos/protos'; +import * as gapicConfig from './big_query_read_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * BigQuery Read API. + * + * The Read API can be used to read data from BigQuery. + * @class + * @memberof v1 + */ +export class BigQueryReadClient { + private _descriptors: Descriptors = {page: {}, stream: {}, longrunning: {}}; + private _innerApiCalls: {[name: string]: Function}; + private _pathTemplates: {[name: string]: gax.PathTemplate}; + private _terminated = false; + private _opts: ClientOptions; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + bigQueryReadStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryReadClient. + * + * @param {object} [options] - The configuration object. See the subsequent + * parameters for more details. + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. 
+ * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + */ + + constructor(opts?: ClientOptions) { + // Ensure that options include the service address and port. + const staticMembers = this.constructor as typeof BigQueryReadClient; + const servicePath = + opts && opts.servicePath + ? opts.servicePath + : opts && opts.apiEndpoint + ? opts.apiEndpoint + : staticMembers.servicePath; + const port = opts && opts.port ? opts.port : staticMembers.port; + + if (!opts) { + opts = {servicePath, port}; + } + opts.servicePath = opts.servicePath || servicePath; + opts.port = opts.port || port; + opts.clientConfig = opts.clientConfig || {}; + + const isBrowser = typeof window !== 'undefined'; + if (isBrowser) { + opts.fallback = true; + } + // If we are in browser, we are already using fallback because of the + // "browser" field in package.json. + // But if we were explicitly requested to use fallback, let's do it now. + this._gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options + // sent to the client. + opts.scopes = (this.constructor as typeof BigQueryReadClient).scopes; + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Determine the client header string. 
+ const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + // For Node.js, pass the path to JSON proto file. + // For browsers, pass the JSON content. + + const nodejsProtoPath = path.join( + __dirname, + '..', + '..', + 'protos', + 'protos.json' + ); + this._protos = this._gaxGrpc.loadProto( + opts.fallback ? require('../../protos/protos.json') : nodejsProtoPath + ); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this._pathTemplates = { + readSessionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this._descriptors.stream = { + readRows: new this._gaxModule.StreamDescriptor( + gax.StreamType.SERVER_STREAMING + ), + }; + + // Put together the default options sent with requests. 
+ this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1.BigQueryRead', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this._innerApiCalls = {}; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryReadStub) { + return this.bigQueryReadStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1.BigQueryRead. + this.bigQueryReadStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1.BigQueryRead' + ) + : // tslint:disable-next-line no-any + (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, + this._opts + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const bigQueryReadStubMethods = [ + 'createReadSession', + 'readRows', + 'splitReadStream', + ]; + + for (const methodName of bigQueryReadStubMethods) { + const innerCallPromise = this.bigQueryReadStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + // eslint-disable-next-line prefer-spread + return stub[methodName].apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const apiCall = this._gaxModule.createApiCall( + innerCallPromise, + this._defaults[methodName], + this._descriptors.page[methodName] || + this._descriptors.stream[methodName] || + this._descriptors.longrunning[methodName] + ); + + this._innerApiCalls[methodName] = ( + argument: {}, + callOptions?: CallOptions, + callback?: APICallback + ) => { + return apiCall(argument, callOptions, callback); + }; + } + + return this.bigQueryReadStub; + } + + /** + * The DNS address for this API service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/bigquery.readonly', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @param {function(Error, string)} callback - the callback to + * be called with the current project Id. 
+ */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options: gax.CallOptions, + callback: Callback< + protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined, + {} | undefined + > + ): void; + /** + * Creates a new read session. A read session divides the contents of a + * BigQuery table into one or more streams, which can then be used to read + * data from the table. The read session also specifies properties of the + * data to be read, such as a list of columns or a push-down filter describing + * the rows to be returned. + * + * A particular row can be read by at most one stream. When the caller has + * reached the end of each stream in the session, then all the data in the + * table has been read. + * + * Data is assigned to each stream such that roughly the same number of + * rows can be read from each stream. Because the server-side unit for + * assigning data is collections of rows, the API does not guarantee that + * each stream will return the same number or rows. Additionally, the + * limits are enforced based on the number of pre-filtered rows, so some + * filters can lead to lopsided assignments. + * + * Read sessions automatically expire 24 hours after they are created and do + * not require manual clean-up by the caller. 
+ * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The request project that owns the session, in the form of + * `projects/{project_id}`. + * @param {google.cloud.bigquery.storage.v1.ReadSession} request.readSession + * Required. Session to be created. + * @param {number} request.maxStreamCount + * Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table. Error + * will be returned if the max count is greater than the current system + * max limit of 1,000. + * + * Streams must be read starting from offset 0. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1.ReadSession}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ + createReadSession( + request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'read_session.table': request.readSession!.table || '', + }); + this.initialize(); + return this._innerApiCalls.createReadSession(request, options, callback); + } + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options?: gax.CallOptions + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options: gax.CallOptions, + callback: Callback< + protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | 
protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined, + {} | undefined + > + ): void; + /** + * Splits a given `ReadStream` into two `ReadStream` objects. These + * `ReadStream` objects are referred to as the primary and the residual + * streams of the split. The original `ReadStream` can still be read from in + * the same manner as before. Both of the returned `ReadStream` objects can + * also be read from, and the rows returned by both child streams will be + * the same as the rows read from the original stream. + * + * Moreover, the two child streams will be allocated back-to-back in the + * original `ReadStream`. Concretely, it is guaranteed that for streams + * original, primary, and residual, that original[0-j] = primary[0-j] and + * original[j-n] = residual[0-m] once the streams have been read to + * completion. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to split. + * @param {number} request.fraction + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to a data storage boundary on the server side. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse}. 
+ * The promise has a method named "cancel" which cancels the ongoing API call. + */ + splitReadStream( + request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined, + {} | undefined + >, + callback?: Callback< + protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined, + {} | undefined + > + ): Promise< + [ + protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + name: request.name || '', + }); + this.initialize(); + return this._innerApiCalls.splitReadStream(request, options, callback); + } + + /** + * Reads rows from the stream in the format prescribed by the ReadSession. + * Each response contains one or more table rows, up to a maximum of 100 MiB + * per response; read requests which attempt to read individual rows larger + * than 100 MiB will fail. + * + * Each request also returns a set of stream statistics reflecting the current + * state of the stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.readStream + * Required. Stream to read rows from. 
+ * @param {number} request.offset + * The offset requested must be less than the last row read from Read. + * Requesting a larger offset is undefined. If not specified, start reading + * from offset zero. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. + */ + readRows( + request?: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest, + options?: gax.CallOptions + ): gax.CancellableStream { + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + read_stream: request.readStream || '', + }); + this.initialize(); + return this._innerApiCalls.readRows(request, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified readSession resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. + */ + readSessionPath(project: string, location: string, session: string) { + return this._pathTemplates.readSessionPathTemplate.render({ + project, + location, + session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. 
+ * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. + */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string + ) { + return this._pathTemplates.readStreamPathTemplate.render({ + project, + location, + session, + stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadStreamName(readStreamName: string) { + return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. 
+ * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadStreamName(readStreamName: string) { + return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Terminate the GRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + */ + close(): Promise { + this.initialize(); + if (!this._terminated) { + return this.bigQueryReadStub!.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json new file mode 100644 index 00000000000..42b2735b9fe --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json @@ -0,0 +1,44 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1.BigQueryRead": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateReadSession": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + 
"ReadRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "SplitReadStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json new file mode 100644 index 00000000000..13440ce253e --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json @@ -0,0 +1,6 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1/avro.proto", + "../../protos/google/cloud/bigquery/storage/v1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1/stream.proto" +] diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts new file mode 100644 index 00000000000..5eae3fb5e73 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -0,0 +1,19 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +export {BigQueryReadClient} from './big_query_read_client'; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 89d815f9e06..f464c061b9f 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -807,9 +807,9 @@ export class BigQueryStorageClient { */ readSessionPath(project: string, location: string, session: string) { return this.pathTemplates.readSessionPathTemplate.render({ - project: project, - location: location, - session: session, + project, + location, + session, }); } @@ -859,9 +859,9 @@ export class BigQueryStorageClient { */ streamPath(project: string, location: string, stream: string) { return this.pathTemplates.streamPathTemplate.render({ - project: project, - location: location, - stream: stream, + project, + location, + stream, }); } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 2c96de14d13..862f3735b43 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,19 +1,20 @@ { - "updateTime": "2020-04-02T12:08:46.500507Z", + "updateTime": "2020-03-11T21:35:37.421495Z", "sources": [ { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "75047719f704d61f405cac6d7439637ab36c1232", - "internalRef": "304305096" + "sha": "80d2bd2c652a5e213302041b0620aff423132589", + "internalRef": "300393997", + "log": "80d2bd2c652a5e213302041b0620aff423132589\nEnable proto annotation and gapic v2 for talent API.\n\nPiperOrigin-RevId: 300393997\n\n85e454be7a353f7fe1bf2b0affb753305785b872\ndocs(google/maps/roads): remove mention of nonexported api\n\nPiperOrigin-RevId: 300367734\n\nbf839ae632e0f263a729569e44be4b38b1c85f9c\nAdding protocol buffer annotations and updated config info for v1 and 
v2.\n\nPiperOrigin-RevId: 300276913\n\n309b899ca18a4c604bce63882a161d44854da549\nPublish `Backup` APIs and protos.\n\nPiperOrigin-RevId: 300246038\n\neced64c3f122421350b4aca68a28e89121d20db8\nadd PHP client libraries\n\nPiperOrigin-RevId: 300193634\n\n7727af0e39df1ae9ad715895c8576d7b65cf6c6d\nfeat: use the latest gapic-generator and protoc-java-resource-name-plugin in googleapis/WORKSPACE.\n\nPiperOrigin-RevId: 300188410\n\n2a25aa351dd5b5fe14895266aff5824d90ce757b\nBreaking change: remove the ProjectOrTenant resource and its references.\n\nPiperOrigin-RevId: 300182152\n\na499dbb28546379415f51803505cfb6123477e71\nUpdate web risk v1 gapic config and BUILD file.\n\nPiperOrigin-RevId: 300152177\n\n52701da10fec2a5f9796e8d12518c0fe574488fe\nFix: apply appropriate namespace/package options for C#, PHP and Ruby.\n\nPiperOrigin-RevId: 300123508\n\n365c029b8cdb63f7751b92ab490f1976e616105c\nAdd CC targets to the kms protos.\n\nThese are needed by go/tink.\n\nPiperOrigin-RevId: 300038469\n\n4ba9aa8a4a1413b88dca5a8fa931824ee9c284e6\nExpose logo recognition API proto for GA.\n\nPiperOrigin-RevId: 299971671\n\n1c9fc2c9e03dadf15f16b1c4f570955bdcebe00e\nAdding ruby_package option to accessapproval.proto for the Ruby client libraries generation.\n\nPiperOrigin-RevId: 299955924\n\n1cc6f0a7bfb147e6f2ede911d9b01e7a9923b719\nbuild(google/maps/routes): generate api clients\n\nPiperOrigin-RevId: 299955905\n\n29a47c965aac79e3fe8e3314482ca0b5967680f0\nIncrease timeout to 1hr for method `dropRange` in bigtable/admin/v2, which is\nsynced with the timeout setting in gapic_yaml.\n\nPiperOrigin-RevId: 299917154\n\n8f631c4c70a60a9c7da3749511ee4ad432b62898\nbuild(google/maps/roads/v1op): move go to monorepo pattern\n\nPiperOrigin-RevId: 299885195\n\nd66816518844ebbf63504c9e8dfc7133921dd2cd\nbuild(google/maps/roads/v1op): Add bazel build files to generate clients.\n\nPiperOrigin-RevId: 299851148\n\naf7dff701fabe029672168649c62356cf1bb43d0\nAdd LogPlayerReports and LogImpressions to Playable 
Locations service\n\nPiperOrigin-RevId: 299724050\n\nb6927fca808f38df32a642c560082f5bf6538ced\nUpdate BigQuery Connection API v1beta1 proto: added credential to CloudSqlProperties.\n\nPiperOrigin-RevId: 299503150\n\n91e1fb5ef9829c0c7a64bfa5bde330e6ed594378\nchore: update protobuf (protoc) version to 3.11.2\n\nPiperOrigin-RevId: 299404145\n\n30e36b4bee6749c4799f4fc1a51cc8f058ba167d\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 299399890\n\nffbb493674099f265693872ae250711b2238090c\nfeat: cloudbuild/v1 add new fields and annotate OUTPUT_OUT fields.\n\nPiperOrigin-RevId: 299397780\n\nbc973a15818e00c19e121959832676e9b7607456\nbazel: Fix broken common dependency\n\nPiperOrigin-RevId: 299397431\n\n71094a343e3b962e744aa49eb9338219537474e4\nchore: bigtable/admin/v2 publish retry config\n\nPiperOrigin-RevId: 299391875\n\n8f488efd7bda33885cb674ddd023b3678c40bd82\nfeat: Migrate logging to GAPIC v2; release new features.\n\nIMPORTANT: This is a breaking change for client libraries\nin all languages.\n\nCommitter: @lukesneeringer, @jskeet\nPiperOrigin-RevId: 299370279\n\n007605bf9ad3a1fd775014ebefbf7f1e6b31ee71\nUpdate API for bigqueryreservation v1beta1.\n- Adds flex capacity commitment plan to CapacityCommitment.\n- Adds methods for getting and updating BiReservations.\n- Adds methods for updating/splitting/merging CapacityCommitments.\n\nPiperOrigin-RevId: 299368059\n\n" } }, { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "99820243d348191bc9c634f2b48ddf65096285ed" + "template": { + "name": "node_library", + "origin": "synthtool.gcp", + "version": "2020.2.4" } } ], @@ -26,6 +27,15 @@ "language": "typescript", "generator": "gapic-generator-typescript" } + }, + { + "client": { + "source": "googleapis", + "apiName": "bigquerystorage", + "apiVersion": "v1", + "language": "typescript", + "generator": "gapic-generator-typescript" + } } ] } \ No newline at end of file diff --git 
a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index 564adc79dcb..24ecbba9777 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -23,7 +23,7 @@ # Run the gapic generator gapic = gcp.GAPICMicrogenerator() name = 'bigquerystorage' -versions = ['v1beta1'] +versions = ['v1beta1', 'v1'] for version in versions: library = gapic.typescript_library( name, @@ -34,7 +34,7 @@ 'package-name': f'@google-cloud/bigquery-storage', }, ) - s.copy(library, excludes=['package.json', 'README.md']) + s.copy(library, excludes=['package.json', 'README.md', 'src/index.ts']) # Copy common templates common_templates = gcp.CommonTemplates() templates = common_templates.node_library(source_location='build/src') diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 1f6ab3784e4..029b4d88171 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -17,10 +17,10 @@ // ** All changes to this file may be overwritten. 
** /* eslint-disable node/no-missing-require, no-unused-vars */ -const storage = require('@google-cloud/bigquery-storage'); +const {BigQueryReadClient} = require('@google-cloud/bigquery-storage'); function main() { - const bigQueryStorageClient = new storage.BigQueryStorageClient(); + const bigQueryReadClient = new BigQueryReadClient(); } main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index d53a99905ac..5c67c093939 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -16,10 +16,11 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** -import {BigQueryStorageClient} from '@google-cloud/bigquery-storage'; +import {BigQueryReadClient} from '@google-cloud/bigquery-storage'; function main() { - new BigQueryStorageClient(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const bigQueryReadClient = new BigQueryReadClient(); } main(); diff --git a/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts b/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts new file mode 100644 index 00000000000..622a2a3d66e --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts @@ -0,0 +1,288 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protosTypes from '../protos/protos'; +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const bigqueryreadModule = require('../src'); + +import {PassThrough} from 'stream'; + +const FAKE_STATUS_CODE = 1; +class FakeError { + name: string; + message: string; + code: number; + constructor(n: number) { + this.name = 'fakeName'; + this.message = 'fake message'; + this.code = n; + } +} +const error = new FakeError(FAKE_STATUS_CODE); +export interface Callback { + (err: FakeError | null, response?: {} | null): void; +} + +export class Operation { + constructor() {} + promise() {} +} +function mockSimpleGrpcMethod( + expectedRequest: {}, + response: {} | null, + error: FakeError | null +) { + return (actualRequest: {}, options: {}, callback: Callback) => { + assert.deepStrictEqual(actualRequest, expectedRequest); + if (error) { + callback(error); + } else if (response) { + callback(null, response); + } else { + callback(null); + } + }; +} +function mockServerStreamingGrpcMethod( + expectedRequest: {}, + response: {} | null, + error: FakeError | null +) { + return (actualRequest: {}) => { + assert.deepStrictEqual(actualRequest, expectedRequest); + const mockStream = new PassThrough({ + objectMode: true, + transform: (chunk: {}, enc: {}, callback: Callback) => { + if (error) { + callback(error); + } else { + callback(null, response); + } + }, + }); + return mockStream; + }; +} +describe('v1.BigQueryReadClient', () => { + it('has servicePath', () => { + const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; + assert(servicePath); + }); + it('has 
apiEndpoint', () => { + const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; + assert(apiEndpoint); + }); + it('has port', () => { + const port = bigqueryreadModule.v1.BigQueryReadClient.port; + assert(port); + assert(typeof port === 'number'); + }); + it('should create a client with no option', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + assert(client); + }); + it('should create a client with gRPC fallback', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + fallback: true, + }); + assert(client); + }); + it('has initialize method and supports deferred initialization', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + await client.initialize(); + assert(client.bigQueryReadStub); + }); + it('has close method', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.close(); + }); + describe('createReadSession', () => { + it('invokes createReadSession without error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest = {}; + request.readSession = {}; + request.readSession.table = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.createReadSession(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + }); + + 
it('invokes createReadSession with error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest = {}; + request.readSession = {}; + request.readSession.table = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( + request, + null, + error + ); + client.createReadSession(request, (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); + done(); + }); + }); + }); + describe('splitReadStream', () => { + it('invokes splitReadStream without error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest = {}; + request.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( + request, + expectedResponse, + null + ); + client.splitReadStream(request, (err: {}, response: {}) => { + assert.ifError(err); + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + }); + + it('invokes splitReadStream with error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest = {}; 
+ request.name = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( + request, + null, + error + ); + client.splitReadStream(request, (err: FakeError, response: {}) => { + assert(err instanceof FakeError); + assert.strictEqual(err.code, FAKE_STATUS_CODE); + assert(typeof response === 'undefined'); + done(); + }); + }); + }); + describe('readRows', () => { + it('invokes readRows without error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest = {}; + request.readStream = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( + request, + expectedResponse, + null + ); + const stream = client.readRows(request); + stream.on('data', (response: {}) => { + assert.deepStrictEqual(response, expectedResponse); + done(); + }); + stream.on('error', (err: FakeError) => { + done(err); + }); + stream.write(); + }); + it('invokes readRows with error', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + // Initialize client before mocking + client.initialize(); + // Mock request + const request: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest = {}; + request.readStream = ''; + // Mock response + const expectedResponse = {}; + // Mock gRPC layer + client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( + request, + null, + error + ); + const stream = client.readRows(request); + stream.on('data', () => { + assert.fail(); + }); + stream.on('error', (err: FakeError) => { + assert(err instanceof FakeError); + 
assert.strictEqual(err.code, FAKE_STATUS_CODE); + done(); + }); + stream.write(); + }); + }); +}); diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js index 683c6ec0233..215b4b9acb9 100644 --- a/handwritten/bigquery-storage/webpack.config.js +++ b/handwritten/bigquery-storage/webpack.config.js @@ -17,8 +17,8 @@ const path = require('path'); module.exports = { entry: './src/index.ts', output: { - library: 'BigQueryStorage', - filename: './big-query-storage.js', + library: 'BigQueryRead', + filename: './big-query-read.js', }, node: { child_process: 'empty', From ad62818f093ba4fc3f5d3f78ccf19c9b2f828af8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 10 Apr 2020 20:57:04 +0200 Subject: [PATCH 026/333] chore(deps): update dependency gts to v2.0.0 (#48) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [gts](https://togithub.com/google/gts) | devDependencies | patch | [`2.0.0-alpha.9` -> `2.0.0`](https://renovatebot.com/diffs/npm/gts/2.0.0-alpha.9/2.0.0) | --- ### Release Notes
google/gts ### [`v2.0.0`](https://togithub.com/google/gts/blob/master/CHANGELOG.md#​200-httpswwwgithubcomgooglegtscomparev112v200-2020-04-02) [Compare Source](https://togithub.com/google/gts/compare/39a2705e51b4b6329a70f91f8293a2d7a363bf5d...v2.0.0) ##### ⚠ BREAKING CHANGES ⚠ This is a major rewrite of the tool. Based on community guidance, we've switched from using [tslint](https://palantir.github.io/tslint/) to [eslint](https://eslint.org/). _Please read all of the steps below to upgrade_. ##### Configuring `eslint` With the shift to `eslint`, `gts` now will format and lint JavaScript _as well_ as TypeScript. Upgrading will require a number of manual steps. To format JavaScript and TypeScript, you can run: $ npx gts fix To specify only TypeScript: $ npx gts fix '**/*.ts' ##### Delete `tslint.json` This file is no longer used, and can lead to confusion. ##### Create a `.eslintrc.json` Now that we're using eslint, you need to extend the eslint configuration baked into the module. Create a new file named `.eslintrc.json`, and paste the following: ```js { "extends": "./node_modules/gts" } ``` ##### Create a `.eslintignore` The `.eslintignore` file lets you ignore specific directories. This tool now lints and formats JavaScript, so it's _really_ important to ignore your build directory! Here is an example of a `.eslintignore` file: **/node_modules build/ ##### Rule changes The underlying linter was changed, so naturally there are going to be a variety of rule changes along the way. To see the full list, check out [.eslintrc.json](https://togithub.com/google/gts/blob/master/.eslintrc.json). ##### Require Node.js 10.x and up Node.js 8.x is now end of life - this module now requires Ndoe.js 10.x and up. 
##### Features - add the eol-last rule ([#​425](https://www.github.com/google/gts/issues/425)) ([50ebd4d](https://www.github.com/google/gts/commit/50ebd4dbaf063615f4c025f567ca28076a734223)) - allow eslintrc to run over tsx files ([#​469](https://www.github.com/google/gts/issues/469)) ([a21db94](https://www.github.com/google/gts/commit/a21db94601def563952d677cb0980a12b6730f4c)) - disable global rule for checking TODO comments ([#​459](https://www.github.com/google/gts/issues/459)) ([96aa84a](https://www.github.com/google/gts/commit/96aa84a0a42181046daa248750cc8fef0c320619)) - override require-atomic-updates ([#​468](https://www.github.com/google/gts/issues/468)) ([8105c93](https://www.github.com/google/gts/commit/8105c9334ee5104b05f6b1b2f150e51419637262)) - prefer single quotes if possible ([#​475](https://www.github.com/google/gts/issues/475)) ([39a2705](https://www.github.com/google/gts/commit/39a2705e51b4b6329a70f91f8293a2d7a363bf5d)) - use eslint instead of tslint ([#​400](https://www.github.com/google/gts/issues/400)) ([b3096fb](https://www.github.com/google/gts/commit/b3096fbd5076d302d93c2307bf627e12c423e726)) ##### Bug Fixes - use .prettierrc.js ([#​437](https://www.github.com/google/gts/issues/437)) ([06efa84](https://www.github.com/google/gts/commit/06efa8444cdf1064b64f3e8d61ebd04f45d90b4c)) - **deps:** update dependency chalk to v4 ([#​477](https://www.github.com/google/gts/issues/477)) ([061d64e](https://www.github.com/google/gts/commit/061d64e29d37b93ce55228937cc100e05ddef352)) - **deps:** update dependency eslint-plugin-node to v11 ([#​426](https://www.github.com/google/gts/issues/426)) ([a394b7c](https://www.github.com/google/gts/commit/a394b7c1f80437f25017ca5c500b968ebb789ece)) - **deps:** update dependency execa to v4 ([#​427](https://www.github.com/google/gts/issues/427)) ([f42ef36](https://www.github.com/google/gts/commit/f42ef36709251553342e655e287e889df72ee3e3)) - **deps:** update dependency prettier to v2 
([#​464](https://www.github.com/google/gts/issues/464)) ([20ef43d](https://www.github.com/google/gts/commit/20ef43d566df17d3c93949ef7db3b72ee9123ca3)) - disable no-use-before-define ([#​431](https://www.github.com/google/gts/issues/431)) ([dea2c22](https://www.github.com/google/gts/commit/dea2c223d1d3a60a1786aa820eebb93be27016a7)) - **deps:** update dependency update-notifier to v4 ([#​403](https://www.github.com/google/gts/issues/403)) ([57393b7](https://www.github.com/google/gts/commit/57393b74c6cf299e8ae09311f0382226b8baa3e3)) - **deps:** upgrade to meow 6.x ([#​423](https://www.github.com/google/gts/issues/423)) ([8f93d00](https://www.github.com/google/gts/commit/8f93d0049337a832d9a22b6ae4e86fd41140ec56)) - align back to the google style guide ([#​440](https://www.github.com/google/gts/issues/440)) ([8bd78c4](https://www.github.com/google/gts/commit/8bd78c4c78526a72400f618a95a987d2a7c1a8db)) - disable empty-function check ([#​467](https://www.github.com/google/gts/issues/467)) ([6455d7a](https://www.github.com/google/gts/commit/6455d7a9d227320d3ffe1b00c9c739b846f339a8)) - drop support for node 8 ([#​422](https://www.github.com/google/gts/issues/422)) ([888c686](https://www.github.com/google/gts/commit/888c68692079065f38ce66ec84472f1f3311a050)) - emit .prettierrc.js with init ([#​462](https://www.github.com/google/gts/issues/462)) ([b114614](https://www.github.com/google/gts/commit/b114614d22ab5560d2d1dd5cb6695968cc80027b)) - enable trailing comma ([#​470](https://www.github.com/google/gts/issues/470)) ([6518f58](https://www.github.com/google/gts/commit/6518f5843d3093e3beb7d3371b56d9aecedf3924)) - include _.tsx and _.jsx in default fix command ([#​473](https://www.github.com/google/gts/issues/473)) ([0509780](https://www.github.com/google/gts/commit/050978005ad089d9b3b5d8895b25ea1175d75db2)) ##### [1.1.2](https://www.github.com/google/gts/compare/v1.1.1...v1.1.2) (2019-11-20) ##### Bug Fixes - **deps:** update to newest prettier (with support for optional chain) 
([#​396](https://www.github.com/google/gts/issues/396)) ([ce8ad06](https://www.github.com/google/gts/commit/ce8ad06c8489c44a9e2ed5292382637b3ebb7601)) ##### [1.1.1](https://www.github.com/google/gts/compare/v1.1.0...v1.1.1) (2019-11-11) ##### Bug Fixes - **deps:** update dependency chalk to v3 ([#​389](https://www.github.com/google/gts/issues/389)) ([1ce0f45](https://www.github.com/google/gts/commit/1ce0f450677e143a27efc39def617d13c66503e8)) - **deps:** update dependency inquirer to v7 ([#​377](https://www.github.com/google/gts/issues/377)) ([bf2c349](https://www.github.com/google/gts/commit/bf2c349b2208ac63e551542599ac9cd27b461338)) - **deps:** update dependency rimraf to v3 ([#​374](https://www.github.com/google/gts/issues/374)) ([2058eaa](https://www.github.com/google/gts/commit/2058eaa682f4baae978b469fd708d1f866e7da74)) - **deps:** update dependency write-file-atomic to v3 ([#​353](https://www.github.com/google/gts/issues/353)) ([59e6aa8](https://www.github.com/google/gts/commit/59e6aa8580a2f8e9457d2d2b6fa9e18e86347592))
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d3919de0baf..cf5da124444 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -37,7 +37,7 @@ "eslint-config-prettier": "^6.10.0", "eslint-plugin-node": "^11.0.0", "eslint-plugin-prettier": "^3.1.2", - "gts": "2.0.0-alpha.9", + "gts": "2.0.0", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", From d6afa0bf9434b5f6a2e3c7fc926364e3fab28a81 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Fri, 10 Apr 2020 18:49:28 -0700 Subject: [PATCH 027/333] fix: remove eslint, update gax, fix generated protos, run the generator (#49) Run the latest version of the generator, update google-gax, update gts, and remove direct dependencies on eslint. 
--- handwritten/bigquery-storage/.jsdoc.js | 2 +- handwritten/bigquery-storage/.prettierrc.js | 2 +- handwritten/bigquery-storage/README.md | 5 +- handwritten/bigquery-storage/package.json | 14 +- handwritten/bigquery-storage/protos/protos.js | 2 +- .../src/v1/big_query_read_client.ts | 189 +++--- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../src/v1beta1/big_query_storage_client.ts | 12 +- handwritten/bigquery-storage/synth.metadata | 17 +- .../system-test/fixtures/sample/src/index.js | 4 +- .../system-test/fixtures/sample/src/index.ts | 3 +- .../test/gapic-big_query_read-v1.ts | 288 -------- .../test/gapic_big_query_read_v1.ts | 623 ++++++++++++++++++ .../test/gapic_big_query_storage_v1beta1.ts | 10 +- 14 files changed, 757 insertions(+), 416 deletions(-) delete mode 100644 handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index e8a4b174718..ac0f1658a27 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2019 Google, LLC.', + copyright: 'Copyright 2020 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/.prettierrc.js b/handwritten/bigquery-storage/.prettierrc.js index 08cba3775be..d1b95106f4c 100644 --- a/handwritten/bigquery-storage/.prettierrc.js +++ b/handwritten/bigquery-storage/.prettierrc.js @@ -4,7 +4,7 @@ // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index cbd319b1dc1..4dc17dfd9d0 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -63,10 +63,9 @@ const avro = require('avsc'); // See reference documentation at // https://cloud.google.com/bigquery/docs/reference/storage -const bqStorage = require('@google-cloud/bigquery-storage').v1beta1 - .BigQueryStorageClient; +const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage'); -const client = new bqStorage(); +const client = new BigQueryStorageClient(); async function bigqueryStorageQuickstart() { // Get current project ID. The read session is created in this project. 
diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index cf5da124444..cdf7c977e7c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -17,27 +17,23 @@ "docs": "jsdoc -c .jsdoc.js", "predocs-test": "npm run docs", "docs-test": "linkinator docs", - "fix": "gts fix && eslint samples --fix", - "prelint": "cd samples; npm link ../; npm i", - "lint": "gts check && eslint samples", + "fix": "gts fix", + "prelint": "cd samples; npm link ../; npm install", + "lint": "gts fix", "prepare": "npm run compile-protos && npm run compile", "system-test": "c8 mocha build/system-test", "test": "c8 mocha build/test", "samples-test": "cd samples/ && npm link ../ && npm test && cd ../" }, "dependencies": { - "google-gax": "^2.0.1" + "google-gax": "^2.1.0" }, "devDependencies": { "@types/mocha": "^7.0.1", "@types/node": "^13.7.1", "@types/sinon": "^9.0.0", "c8": "^7.1.0", - "eslint": "^6.8.0", - "eslint-config-prettier": "^6.10.0", - "eslint-plugin-node": "^11.0.0", - "eslint-plugin-prettier": "^3.1.2", - "gts": "2.0.0", + "gts": "^2.0.0", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 911efd69b28..402f59277c5 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -28,7 +28,7 @@ var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; // Exported root namespace - var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + var $root = $protobuf.roots._google_cloud_bigquery_storage_1_1_0_protos || ($protobuf.roots._google_cloud_bigquery_storage_1_1_0_protos = {}); $root.google = (function() { diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 
069be0386c0..ba031a72a85 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,16 +17,10 @@ // ** All changes to this file may be overwritten. ** import * as gax from 'google-gax'; -import { - APICallback, - Callback, - CallOptions, - Descriptors, - ClientOptions, -} from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; -import * as protosTypes from '../../protos/protos'; +import * as protos from '../../protos/protos'; import * as gapicConfig from './big_query_read_client_config.json'; const version = require('../../../package.json').version; @@ -39,9 +33,6 @@ const version = require('../../../package.json').version; * @memberof v1 */ export class BigQueryReadClient { - private _descriptors: Descriptors = {page: {}, stream: {}, longrunning: {}}; - private _innerApiCalls: {[name: string]: Function}; - private _pathTemplates: {[name: string]: gax.PathTemplate}; private _terminated = false; private _opts: ClientOptions; private _gaxModule: typeof gax | typeof gax.fallback; @@ -49,6 +40,14 @@ export class BigQueryReadClient { private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; bigQueryReadStub?: Promise<{[name: string]: Function}>; /** @@ -140,13 +139,16 @@ export class BigQueryReadClient { 'protos.json' ); this._protos = this._gaxGrpc.loadProto( - opts.fallback ? require('../../protos/protos.json') : nodejsProtoPath + opts.fallback + ? 
// eslint-disable-next-line @typescript-eslint/no-var-requires + require('../../protos/protos.json') + : nodejsProtoPath ); // This API contains "path templates"; forward-slash-separated // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. - this._pathTemplates = { + this.pathTemplates = { readSessionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}' ), @@ -157,7 +159,7 @@ export class BigQueryReadClient { // Some of the methods on this service provide streaming responses. // Provide descriptors for these. - this._descriptors.stream = { + this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( gax.StreamType.SERVER_STREAMING ), @@ -174,7 +176,7 @@ export class BigQueryReadClient { // Set up a dictionary of "inner API calls"; the core implementation // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. - this._innerApiCalls = {}; + this.innerApiCalls = {}; } /** @@ -201,7 +203,7 @@ export class BigQueryReadClient { ? 
(this._protos as protobuf.Root).lookupService( 'google.cloud.bigquery.storage.v1.BigQueryRead' ) - : // tslint:disable-next-line no-any + : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, this._opts ) as Promise<{[method: string]: Function}>; @@ -213,15 +215,14 @@ export class BigQueryReadClient { 'readRows', 'splitReadStream', ]; - for (const methodName of bigQueryReadStubMethods) { - const innerCallPromise = this.bigQueryReadStub.then( + const callPromise = this.bigQueryReadStub.then( stub => (...args: Array<{}>) => { if (this._terminated) { return Promise.reject('The client has already been closed.'); } - // eslint-disable-next-line prefer-spread - return stub[methodName].apply(stub, args); + const func = stub[methodName]; + return func.apply(stub, args); }, (err: Error | null | undefined) => () => { throw err; @@ -229,20 +230,14 @@ export class BigQueryReadClient { ); const apiCall = this._gaxModule.createApiCall( - innerCallPromise, + callPromise, this._defaults[methodName], - this._descriptors.page[methodName] || - this._descriptors.stream[methodName] || - this._descriptors.longrunning[methodName] + this.descriptors.page[methodName] || + this.descriptors.stream[methodName] || + this.descriptors.longrunning[methodName] ); - this._innerApiCalls[methodName] = ( - argument: {}, - callOptions?: CallOptions, - callback?: APICallback - ) => { - return apiCall(argument, callOptions, callback); - }; + this.innerApiCalls[methodName] = apiCall; } return this.bigQueryReadStub; @@ -303,26 +298,37 @@ export class BigQueryReadClient { // -- Service calls -- // ------------------- createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + 
protos.google.cloud.bigquery.storage.v1.IReadSession, ( - | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest | undefined ), {} | undefined ] >; createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.cloud.bigquery.storage.v1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -369,26 +375,28 @@ export class BigQueryReadClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ createReadSession( - request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.cloud.bigquery.storage.v1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.cloud.bigquery.storage.v1.IReadSession, - | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1.IReadSession, + protos.google.cloud.bigquery.storage.v1.IReadSession, ( - | protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest | undefined ), {} | undefined @@ -411,29 +419,40 @@ export class BigQueryReadClient { 'read_session.table': request.readSession!.table || '', }); this.initialize(); - return this._innerApiCalls.createReadSession(request, options, callback); + return this.innerApiCalls.createReadSession(request, options, callback); } splitReadStream( - request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, options?: gax.CallOptions ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | 
protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest | undefined ), {} | undefined ] >; splitReadStream( - request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, options: gax.CallOptions, callback: Callback< - protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined > ): void; /** @@ -469,26 +488,28 @@ export class BigQueryReadClient { * The promise has a method named "cancel" which cancels the ongoing API call. 
*/ splitReadStream( - request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, optionsOrCallback?: | gax.CallOptions | Callback< - protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined >, callback?: Callback< - protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null | undefined, - {} | undefined + {} | null | undefined > ): Promise< [ - protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, ( - | protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest | undefined ), {} | undefined @@ -511,7 +532,7 @@ export class BigQueryReadClient { name: request.name || '', }); this.initialize(); - return this._innerApiCalls.splitReadStream(request, options, callback); + return this.innerApiCalls.splitReadStream(request, options, callback); } /** @@ -537,7 +558,7 @@ export class BigQueryReadClient { * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. 
*/ readRows( - request?: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest, + request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, options?: gax.CallOptions ): gax.CancellableStream { request = request || {}; @@ -550,7 +571,7 @@ export class BigQueryReadClient { read_stream: request.readStream || '', }); this.initialize(); - return this._innerApiCalls.readRows(request, options); + return this.innerApiCalls.readRows(request, options); } // -------------------- @@ -566,10 +587,10 @@ export class BigQueryReadClient { * @returns {string} Resource name string. */ readSessionPath(project: string, location: string, session: string) { - return this._pathTemplates.readSessionPathTemplate.render({ - project, - location, - session, + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, }); } @@ -581,7 +602,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the project. */ matchProjectFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .project; } @@ -593,7 +614,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the location. */ matchLocationFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .location; } @@ -605,7 +626,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the session. 
*/ matchSessionFromReadSessionName(readSessionName: string) { - return this._pathTemplates.readSessionPathTemplate.match(readSessionName) + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) .session; } @@ -624,11 +645,11 @@ export class BigQueryReadClient { session: string, stream: string ) { - return this._pathTemplates.readStreamPathTemplate.render({ - project, - location, - session, - stream, + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, }); } @@ -640,7 +661,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the project. */ matchProjectFromReadStreamName(readStreamName: string) { - return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) .project; } @@ -652,7 +673,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the location. */ matchLocationFromReadStreamName(readStreamName: string) { - return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) .location; } @@ -664,7 +685,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the session. */ matchSessionFromReadStreamName(readStreamName: string) { - return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) .session; } @@ -676,7 +697,7 @@ export class BigQueryReadClient { * @returns {string} A string representing the stream. 
*/ matchStreamFromReadStreamName(readStreamName: string) { - return this._pathTemplates.readStreamPathTemplate.match(readStreamName) + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) .stream; } diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index 5eae3fb5e73..c882095873b 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index f464c061b9f..89d815f9e06 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -807,9 +807,9 @@ export class BigQueryStorageClient { */ readSessionPath(project: string, location: string, session: string) { return this.pathTemplates.readSessionPathTemplate.render({ - project, - location, - session, + project: project, + location: location, + session: session, }); } @@ -859,9 +859,9 @@ export class BigQueryStorageClient { */ streamPath(project: string, location: string, stream: string) { return this.pathTemplates.streamPathTemplate.render({ - project, - location, - stream, + project: project, + location: location, + stream: stream, }); } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 862f3735b43..daa2e456f0e 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,20 +1,11 @@ { - "updateTime": "2020-03-11T21:35:37.421495Z", + "updateTime": "2020-04-10T23:55:16.301945Z", "sources": [ { "git": { - "name": "googleapis", - "remote": 
"https://github.com/googleapis/googleapis.git", - "sha": "80d2bd2c652a5e213302041b0620aff423132589", - "internalRef": "300393997", - "log": "80d2bd2c652a5e213302041b0620aff423132589\nEnable proto annotation and gapic v2 for talent API.\n\nPiperOrigin-RevId: 300393997\n\n85e454be7a353f7fe1bf2b0affb753305785b872\ndocs(google/maps/roads): remove mention of nonexported api\n\nPiperOrigin-RevId: 300367734\n\nbf839ae632e0f263a729569e44be4b38b1c85f9c\nAdding protocol buffer annotations and updated config info for v1 and v2.\n\nPiperOrigin-RevId: 300276913\n\n309b899ca18a4c604bce63882a161d44854da549\nPublish `Backup` APIs and protos.\n\nPiperOrigin-RevId: 300246038\n\neced64c3f122421350b4aca68a28e89121d20db8\nadd PHP client libraries\n\nPiperOrigin-RevId: 300193634\n\n7727af0e39df1ae9ad715895c8576d7b65cf6c6d\nfeat: use the latest gapic-generator and protoc-java-resource-name-plugin in googleapis/WORKSPACE.\n\nPiperOrigin-RevId: 300188410\n\n2a25aa351dd5b5fe14895266aff5824d90ce757b\nBreaking change: remove the ProjectOrTenant resource and its references.\n\nPiperOrigin-RevId: 300182152\n\na499dbb28546379415f51803505cfb6123477e71\nUpdate web risk v1 gapic config and BUILD file.\n\nPiperOrigin-RevId: 300152177\n\n52701da10fec2a5f9796e8d12518c0fe574488fe\nFix: apply appropriate namespace/package options for C#, PHP and Ruby.\n\nPiperOrigin-RevId: 300123508\n\n365c029b8cdb63f7751b92ab490f1976e616105c\nAdd CC targets to the kms protos.\n\nThese are needed by go/tink.\n\nPiperOrigin-RevId: 300038469\n\n4ba9aa8a4a1413b88dca5a8fa931824ee9c284e6\nExpose logo recognition API proto for GA.\n\nPiperOrigin-RevId: 299971671\n\n1c9fc2c9e03dadf15f16b1c4f570955bdcebe00e\nAdding ruby_package option to accessapproval.proto for the Ruby client libraries generation.\n\nPiperOrigin-RevId: 299955924\n\n1cc6f0a7bfb147e6f2ede911d9b01e7a9923b719\nbuild(google/maps/routes): generate api clients\n\nPiperOrigin-RevId: 299955905\n\n29a47c965aac79e3fe8e3314482ca0b5967680f0\nIncrease timeout to 1hr for 
method `dropRange` in bigtable/admin/v2, which is\nsynced with the timeout setting in gapic_yaml.\n\nPiperOrigin-RevId: 299917154\n\n8f631c4c70a60a9c7da3749511ee4ad432b62898\nbuild(google/maps/roads/v1op): move go to monorepo pattern\n\nPiperOrigin-RevId: 299885195\n\nd66816518844ebbf63504c9e8dfc7133921dd2cd\nbuild(google/maps/roads/v1op): Add bazel build files to generate clients.\n\nPiperOrigin-RevId: 299851148\n\naf7dff701fabe029672168649c62356cf1bb43d0\nAdd LogPlayerReports and LogImpressions to Playable Locations service\n\nPiperOrigin-RevId: 299724050\n\nb6927fca808f38df32a642c560082f5bf6538ced\nUpdate BigQuery Connection API v1beta1 proto: added credential to CloudSqlProperties.\n\nPiperOrigin-RevId: 299503150\n\n91e1fb5ef9829c0c7a64bfa5bde330e6ed594378\nchore: update protobuf (protoc) version to 3.11.2\n\nPiperOrigin-RevId: 299404145\n\n30e36b4bee6749c4799f4fc1a51cc8f058ba167d\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 299399890\n\nffbb493674099f265693872ae250711b2238090c\nfeat: cloudbuild/v1 add new fields and annotate OUTPUT_OUT fields.\n\nPiperOrigin-RevId: 299397780\n\nbc973a15818e00c19e121959832676e9b7607456\nbazel: Fix broken common dependency\n\nPiperOrigin-RevId: 299397431\n\n71094a343e3b962e744aa49eb9338219537474e4\nchore: bigtable/admin/v2 publish retry config\n\nPiperOrigin-RevId: 299391875\n\n8f488efd7bda33885cb674ddd023b3678c40bd82\nfeat: Migrate logging to GAPIC v2; release new features.\n\nIMPORTANT: This is a breaking change for client libraries\nin all languages.\n\nCommitter: @lukesneeringer, @jskeet\nPiperOrigin-RevId: 299370279\n\n007605bf9ad3a1fd775014ebefbf7f1e6b31ee71\nUpdate API for bigqueryreservation v1beta1.\n- Adds flex capacity commitment plan to CapacityCommitment.\n- Adds methods for getting and updating BiReservations.\n- Adds methods for updating/splitting/merging CapacityCommitments.\n\nPiperOrigin-RevId: 299368059\n\n" - } - }, - { - "template": { - "name": "node_library", - "origin": "synthtool.gcp", - 
"version": "2020.2.4" + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "6f32150677c9784f3c3a7e1949472bd29c9d72c5" } } ], diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 029b4d88171..a0f6f03ee57 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -17,10 +17,10 @@ // ** All changes to this file may be overwritten. ** /* eslint-disable node/no-missing-require, no-unused-vars */ -const {BigQueryReadClient} = require('@google-cloud/bigquery-storage'); +const storage = require('@google-cloud/bigquery-storage'); function main() { - const bigQueryReadClient = new BigQueryReadClient(); + const bigQueryReadClient = new storage.BigQueryReadClient(); } main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 5c67c093939..5edf8f362ab 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -19,8 +19,7 @@ import {BigQueryReadClient} from '@google-cloud/bigquery-storage'; function main() { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const bigQueryReadClient = new BigQueryReadClient(); + new BigQueryReadClient(); } main(); diff --git a/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts b/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts deleted file mode 100644 index 622a2a3d66e..00000000000 --- a/handwritten/bigquery-storage/test/gapic-big_query_read-v1.ts +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protosTypes from '../protos/protos'; -import * as assert from 'assert'; -import {describe, it} from 'mocha'; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const bigqueryreadModule = require('../src'); - -import {PassThrough} from 'stream'; - -const FAKE_STATUS_CODE = 1; -class FakeError { - name: string; - message: string; - code: number; - constructor(n: number) { - this.name = 'fakeName'; - this.message = 'fake message'; - this.code = n; - } -} -const error = new FakeError(FAKE_STATUS_CODE); -export interface Callback { - (err: FakeError | null, response?: {} | null): void; -} - -export class Operation { - constructor() {} - promise() {} -} -function mockSimpleGrpcMethod( - expectedRequest: {}, - response: {} | null, - error: FakeError | null -) { - return (actualRequest: {}, options: {}, callback: Callback) => { - assert.deepStrictEqual(actualRequest, expectedRequest); - if (error) { - callback(error); - } else if (response) { - callback(null, response); - } else { - callback(null); - } - }; -} -function mockServerStreamingGrpcMethod( - expectedRequest: {}, - response: {} | null, - error: FakeError | null -) { - return (actualRequest: {}) => { - assert.deepStrictEqual(actualRequest, expectedRequest); - const mockStream = new PassThrough({ - objectMode: true, - transform: (chunk: {}, enc: {}, callback: Callback) 
=> { - if (error) { - callback(error); - } else { - callback(null, response); - } - }, - }); - return mockStream; - }; -} -describe('v1.BigQueryReadClient', () => { - it('has servicePath', () => { - const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; - assert(servicePath); - }); - it('has apiEndpoint', () => { - const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; - assert(apiEndpoint); - }); - it('has port', () => { - const port = bigqueryreadModule.v1.BigQueryReadClient.port; - assert(port); - assert(typeof port === 'number'); - }); - it('should create a client with no option', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient(); - assert(client); - }); - it('should create a client with gRPC fallback', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - fallback: true, - }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryReadStub, undefined); - await client.initialize(); - assert(client.bigQueryReadStub); - }); - it('has close method', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.close(); - }); - describe('createReadSession', () => { - it('invokes createReadSession without error', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest = {}; - request.readSession = {}; - request.readSession.table = ''; - // Mock response - const 
expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.createReadSession(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes createReadSession with error', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest = {}; - request.readSession = {}; - request.readSession.table = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.createReadSession = mockSimpleGrpcMethod( - request, - null, - error - ); - client.createReadSession(request, (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); - describe('splitReadStream', () => { - it('invokes splitReadStream without error', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest = {}; - request.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( - request, - expectedResponse, - null - ); - client.splitReadStream(request, (err: {}, response: {}) => { - assert.ifError(err); - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - }); - - it('invokes splitReadStream with error', done => { - 
const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest = {}; - request.name = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.splitReadStream = mockSimpleGrpcMethod( - request, - null, - error - ); - client.splitReadStream(request, (err: FakeError, response: {}) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - assert(typeof response === 'undefined'); - done(); - }); - }); - }); - describe('readRows', () => { - it('invokes readRows without error', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest = {}; - request.readStream = ''; - // Mock response - const expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( - request, - expectedResponse, - null - ); - const stream = client.readRows(request); - stream.on('data', (response: {}) => { - assert.deepStrictEqual(response, expectedResponse); - done(); - }); - stream.on('error', (err: FakeError) => { - done(err); - }); - stream.write(); - }); - it('invokes readRows with error', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - // Initialize client before mocking - client.initialize(); - // Mock request - const request: protosTypes.google.cloud.bigquery.storage.v1.IReadRowsRequest = {}; - request.readStream = ''; - // Mock response - const 
expectedResponse = {}; - // Mock gRPC layer - client._innerApiCalls.readRows = mockServerStreamingGrpcMethod( - request, - null, - error - ); - const stream = client.readRows(request); - stream.on('data', () => { - assert.fail(); - }); - stream.on('error', (err: FakeError) => { - assert(err instanceof FakeError); - assert.strictEqual(err.code, FAKE_STATUS_CODE); - done(); - }); - stream.write(); - }); - }); -}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts new file mode 100644 index 00000000000..8e8aee52a2b --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -0,0 +1,623 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigqueryreadModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message).toObject( + instance as protobuf.Message, + {defaults: true} + ); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubServerStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // write something to the stream to trigger transformStub and send the response back to the client + setImmediate(() => { + mockStream.write({}); + }); + setImmediate(() => { + mockStream.end(); + }); + return sinon.stub().returns(mockStream); +} + +describe('v1.BigQueryReadClient', () => { + it('has servicePath', () => { + const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigqueryreadModule.v1.BigQueryReadClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + await client.initialize(); + assert(client.bigQueryReadStub); + }); + + it('has close method', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.close(); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createReadSession', () => { + it('invokes createReadSession without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession = {}; + request.readSession.table = ''; + const expectedHeaderRequestParams = 'read_session.table='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); + const [response] = await client.createReadSession(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); 
+ }); + + it('invokes createReadSession without error using callback', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession = {}; + request.readSession.table = ''; + const expectedHeaderRequestParams = 'read_session.table='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.createReadSession( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IReadSession | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes createReadSession with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession = {}; + request.readSession.table = ''; + const expectedHeaderRequestParams = 'read_session.table='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new 
Error('expected'); + client.innerApiCalls.createReadSession = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.createReadSession(request); + }, expectedError); + assert( + (client.innerApiCalls.createReadSession as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('splitReadStream', () => { + it('invokes splitReadStream without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); + const [response] = await client.splitReadStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes splitReadStream without error using callback', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = 
generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.splitReadStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes splitReadStream with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.splitReadStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.splitReadStream(request); + }, expectedError); + assert( + (client.innerApiCalls.splitReadStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('readRows', () => { + it('invokes readRows without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + request.readStream = ''; + const expectedHeaderRequestParams = 'read_stream='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() + ); + client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.readRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions) + ); + }); + + it('invokes readRows with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + request.readStream = ''; + const expectedHeaderRequestParams = 'read_stream='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.readRows = stubServerStreamingCall( + undefined, + expectedError + ); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(async () => { + 
await promise; + }, expectedError); + assert( + (client.innerApiCalls.readRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions) + ); + }); + }); + + describe('Path templates', () => { + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + 
describe('readStream', () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + 
(client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 47eca9a12c9..7321fc43ee4 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -271,7 +271,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - assert.rejects(async () => { + await assert.rejects(async () => { await client.createReadSession(request); }, expectedError); assert( @@ -390,7 +390,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - assert.rejects(async () => { + await assert.rejects(async () => { await client.batchCreateReadSessionStreams(request); }, expectedError); assert( @@ -507,7 +507,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - assert.rejects(async () => { + await assert.rejects(async () => { await client.finalizeStream(request); }, expectedError); assert( @@ -624,7 +624,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - assert.rejects(async () => { + await assert.rejects(async () => { await client.splitReadStream(request); }, expectedError); assert( @@ -722,7 +722,7 @@ describe('v1beta1.BigQueryStorageClient', () => { reject(err); }); }); - assert.rejects(async () => { + await assert.rejects(async () => { await promise; }, expectedError); assert( From 052a6df6ee2a9d1f80b98e0eab5822d3b9975de9 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Sat, 11 Apr 2020 22:20:11 -0700 Subject: [PATCH 028/333] build: remove unused codecov config (#50) --- handwritten/bigquery-storage/codecov.yaml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 handwritten/bigquery-storage/codecov.yaml diff --git 
a/handwritten/bigquery-storage/codecov.yaml b/handwritten/bigquery-storage/codecov.yaml deleted file mode 100644 index 5724ea9478d..00000000000 --- a/handwritten/bigquery-storage/codecov.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -codecov: - ci: - - source.cloud.google.com From 7b3596dada155a200fea97bc9188799f0d0390f8 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 13 Apr 2020 09:32:42 -0700 Subject: [PATCH 029/333] docs: update doc urls (#51) --- .../google/cloud/bigquery/storage/v1/arrow.proto | 2 +- .../cloud/bigquery/storage/v1/stream.proto | 2 +- handwritten/bigquery-storage/synth.metadata | 16 +++++++++++++++- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 90add9780e1..1c54eeab07f 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -27,7 +27,7 @@ option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; // Arrow schema as specified in // https://arrow.apache.org/docs/python/api/datatypes.html // and serialized to bytes using IPC: -// https://arrow.apache.org/docs/ipc.html. +// https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc // // See code samples on how this message can be deserialized. message ArrowSchema { diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 19d4231da5b..febad03675e 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -99,7 +99,7 @@ message ReadSession { } // Immutable. 
Table that this ReadSession is reading from, in the form - // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id} + // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` string table = 6 [ (google.api.field_behavior) = IMMUTABLE, (google.api.resource_reference) = { diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index daa2e456f0e..7ca8503ec55 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -1,6 +1,20 @@ { - "updateTime": "2020-04-10T23:55:16.301945Z", "sources": [ + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", + "sha": "b5b9492a0c4b86b868a2b33c5c350301db29cc65" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "1bd77e8ce6f953ac641af7966d0c52646afc16a8", + "internalRef": "305974465" + } + }, { "git": { "name": "synthtool", From 31b4ee5ab4335cac7162947cc5e90c8b310ef632 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 13 Apr 2020 14:57:22 -0700 Subject: [PATCH 030/333] chore: update lint ignore files (#52) --- handwritten/bigquery-storage/.eslintignore | 3 ++- handwritten/bigquery-storage/.prettierignore | 9 ++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/.eslintignore b/handwritten/bigquery-storage/.eslintignore index 09b31fe735a..9340ad9b86d 100644 --- a/handwritten/bigquery-storage/.eslintignore +++ b/handwritten/bigquery-storage/.eslintignore @@ -1,5 +1,6 @@ **/node_modules -src/**/doc/* +**/coverage +test/fixtures build/ docs/ protos/ diff --git a/handwritten/bigquery-storage/.prettierignore b/handwritten/bigquery-storage/.prettierignore index f6fac98b0a8..9340ad9b86d 100644 --- a/handwritten/bigquery-storage/.prettierignore +++ b/handwritten/bigquery-storage/.prettierignore @@ -1,3 +1,6 @@ -node_modules/* -samples/node_modules/* -src/**/doc/* 
+**/node_modules +**/coverage +test/fixtures +build/ +docs/ +protos/ From d354f1537722d4d21d18e5e3e1995f1cb5b36f02 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 13 Apr 2020 19:28:47 -0700 Subject: [PATCH 031/333] chore: remove tslint.json (#53) --- handwritten/bigquery-storage/tslint.json | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 handwritten/bigquery-storage/tslint.json diff --git a/handwritten/bigquery-storage/tslint.json b/handwritten/bigquery-storage/tslint.json deleted file mode 100644 index 617dc975bae..00000000000 --- a/handwritten/bigquery-storage/tslint.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "gts/tslint.json" -} From ec5c8dea834533d481497c8975c722a3715a5b83 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Tue, 14 Apr 2020 09:55:17 -0700 Subject: [PATCH 032/333] chore: remove unused dev packages (#54) --- handwritten/bigquery-storage/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index cdf7c977e7c..80ea15bf736 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -41,7 +41,6 @@ "mocha": "^7.0.1", "null-loader": "^3.0.0", "pack-n-play": "^1.0.0-2", - "prettier": "^1.19.1", "sinon": "^9.0.1", "ts-loader": "^6.2.1", "typescript": "^3.8.3", From 4c1fb5bd72016b2592c80b63ce2668d0d080d9f7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 15 Apr 2020 17:31:48 +0200 Subject: [PATCH 033/333] chore(deps): update dependency ts-loader to v7 (#55) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [ts-loader](https://togithub.com/TypeStrong/ts-loader) | devDependencies | major | [`^6.2.1` -> `^7.0.0`](https://renovatebot.com/diffs/npm/ts-loader/6.2.2/7.0.0) | --- ### Release Notes
TypeStrong/ts-loader ### [`v7.0.0`](https://togithub.com/TypeStrong/ts-loader/blob/master/CHANGELOG.md#v700) [Compare Source](https://togithub.com/TypeStrong/ts-loader/compare/v6.2.2...v7.0.0) - [Project reference support enhancements](https://togithub.com/TypeStrong/ts-loader/pull/1076) - thanks [@​sheetalkamat](https://togithub.com/sheetalkamat)! - Following the end of life of Node 8, `ts-loader` no longer supports Node 8 **BREAKING CHANGE**
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 80ea15bf736..2dd3c73635b 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -42,7 +42,7 @@ "null-loader": "^3.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^9.0.1", - "ts-loader": "^6.2.1", + "ts-loader": "^7.0.0", "typescript": "^3.8.3", "webpack": "^4.41.6", "webpack-cli": "^3.3.11" From 85e39ab509318b27ae8cdf4483cf5e1c14d83bb6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 15 Apr 2020 18:31:29 +0200 Subject: [PATCH 034/333] chore(deps): update dependency null-loader to v4 (#56) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [null-loader](https://togithub.com/webpack-contrib/null-loader) | devDependencies | major | [`^3.0.0` -> `^4.0.0`](https://renovatebot.com/diffs/npm/null-loader/3.0.0/4.0.0) | --- ### Release Notes
webpack-contrib/null-loader ### [`v4.0.0`](https://togithub.com/webpack-contrib/null-loader/blob/master/CHANGELOG.md#​400-httpsgithubcomwebpack-contribnull-loadercomparev300v400-2020-04-15) [Compare Source](https://togithub.com/webpack-contrib/null-loader/compare/v3.0.0...v4.0.0) ##### Bug Fixes - support `webpack@5` ##### ⚠ BREAKING CHANGES - minimum required Nodejs version is `10.13`
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2dd3c73635b..c430768b059 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -39,7 +39,7 @@ "jsdoc-region-tag": "^1.0.4", "linkinator": "^2.0.1", "mocha": "^7.0.1", - "null-loader": "^3.0.0", + "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^9.0.1", "ts-loader": "^7.0.0", From 07c9907cc0eafdfe4486f6fd79fbb713992970cc Mon Sep 17 00:00:00 2001 From: David Supplee Date: Fri, 17 Apr 2020 13:53:36 -0700 Subject: [PATCH 035/333] chore: add client_documentation (#58) --- handwritten/bigquery-storage/.repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index e417e6a9678..09762cf5274 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -2,6 +2,7 @@ "name": "bigquerystorage", "name_pretty": "Google BigQuery Storage", "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", + "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", 
"issue_tracker": "https://b.corp.google.com/savedsearches/559654", "release_level": "beta", "language": "nodejs", From 145446d1628cb16e4713bf26697052ad28d5891b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 17 Apr 2020 16:12:06 -0700 Subject: [PATCH 036/333] chore: update linting (#57) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/e2decde6-d852-4ee4-a679-2a536210319e/targets --- handwritten/bigquery-storage/synth.metadata | 8 ++++---- handwritten/bigquery-storage/system-test/install.ts | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7ca8503ec55..03cfbcff61d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,22 +4,22 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "b5b9492a0c4b86b868a2b33c5c350301db29cc65" + "sha": "6b00060ca9bbd07cc540e7f29eb33404c58cdccf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "1bd77e8ce6f953ac641af7966d0c52646afc16a8", - "internalRef": "305974465" + "sha": "dea1f52034d188ca371c3a70c716ca2a6b3f2aa1", + "internalRef": "306298030" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6f32150677c9784f3c3a7e1949472bd29c9d72c5" + "sha": "682c0c37d1054966ca662a44259e96cc7aea4413" } } ], diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index c4d80e9c0c8..4c1ba3eb79a 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -21,7 +21,7 @@ import {readFileSync} from 'fs'; import {describe, it} from 'mocha'; describe('typescript consumer tests', () => { - it('should have 
correct type signature for typescript users', async function() { + it('should have correct type signature for typescript users', async function () { this.timeout(300000); const options = { packageDir: process.cwd(), // path to your module. @@ -35,7 +35,7 @@ describe('typescript consumer tests', () => { await packNTest(options); // will throw upon error. }); - it('should have correct type signature for javascript users', async function() { + it('should have correct type signature for javascript users', async function () { this.timeout(300000); const options = { packageDir: process.cwd(), // path to your module. From fd2889b3d3dbee1ff92dbcb14621153f4032c807 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 20 Apr 2020 15:40:29 -0700 Subject: [PATCH 037/333] docs: add reference doc link (#60) --- handwritten/bigquery-storage/README.md | 7 +++++-- handwritten/bigquery-storage/synth.metadata | 8 ++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 4dc17dfd9d0..6e66eb377d7 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -14,7 +14,7 @@ Client for the BigQuery Storage API - +* [Google BigQuery Storage Node.js Client API Reference][client-docs] * [Google BigQuery Storage Documentation][product-docs] * [github.com/googleapis/nodejs-bigquery-storage](https://github.com/googleapis/nodejs-bigquery-storage) @@ -190,6 +190,9 @@ has instructions for running the samples. +The [Google BigQuery Storage Node.js Client API Reference][client-docs] documentation +also contains samples. + ## Supported Node.js Versions Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). 
@@ -245,7 +248,7 @@ Apache Version 2.0 See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/LICENSE) - +[client-docs]: https://googleapis.dev/nodejs/bigquerystorage/latest [product-docs]: https://cloud.google.com/bigquery/docs/reference/storage [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png [projects]: https://console.cloud.google.com/project diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 03cfbcff61d..dc56cff4f20 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,22 +4,22 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "6b00060ca9bbd07cc540e7f29eb33404c58cdccf" + "sha": "aa16f9c028724dbab16ece815408771a36935dee" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dea1f52034d188ca371c3a70c716ca2a6b3f2aa1", - "internalRef": "306298030" + "sha": "42ee97c1b93a0e3759bbba3013da309f670a90ab", + "internalRef": "307114445" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "682c0c37d1054966ca662a44259e96cc7aea4413" + "sha": "19465d3ec5e5acdb01521d8f3bddd311bcbee28d" } } ], From c750ffae32a4483a15eaea30b6fa739f576f2ebe Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 21 Apr 2020 20:40:16 -0700 Subject: [PATCH 038/333] build: adopt changes to generator formatting (#61) --- handwritten/bigquery-storage/protos/protos.js | 412 +++++++++--------- handwritten/bigquery-storage/synth.metadata | 2 +- 2 files changed, 207 insertions(+), 207 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 402f59277c5..d1ff86ad210 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -131,7 +131,7 @@ ArrowSchema.encode = function 
encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); return writer; }; @@ -336,9 +336,9 @@ ArrowRecordBatch.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; @@ -560,7 +560,7 @@ AvroSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.schema != null && message.hasOwnProperty("schema")) + if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); return writer; }; @@ -756,9 +756,9 @@ AvroRows.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; @@ -1132,11 +1132,11 @@ 
CreateReadSessionRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.readSession != null && message.hasOwnProperty("readSession")) + if (message.readSession != null && Object.hasOwnProperty.call(message, "readSession")) $root.google.cloud.bigquery.storage.v1.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + if (message.maxStreamCount != null && Object.hasOwnProperty.call(message, "maxStreamCount")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount); return writer; }; @@ -1360,9 +1360,9 @@ ReadRowsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.readStream != null && message.hasOwnProperty("readStream")) + if (message.readStream != null && Object.hasOwnProperty.call(message, "readStream")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.readStream); - if (message.offset != null && message.hasOwnProperty("offset")) + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); return writer; }; @@ -1575,7 +1575,7 @@ ThrottleState.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); return writer; }; @@ -1762,7 +1762,7 @@ StreamStats.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.progress 
!= null && message.hasOwnProperty("progress")) + if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.encode(message.progress, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -1960,9 +1960,9 @@ Progress.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) writer.uint32(/* id 1, wireType 1 =*/9).double(message.atResponseStart); - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) writer.uint32(/* id 2, wireType 1 =*/17).double(message.atResponseEnd); return writer; }; @@ -2214,15 +2214,15 @@ ReadRowsResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.stats != null && message.hasOwnProperty("stats")) + if (message.stats != null && Object.hasOwnProperty.call(message, "stats")) $root.google.cloud.bigquery.storage.v1.StreamStats.encode(message.stats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.avroRows != null && message.hasOwnProperty("avroRows")) + if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) $root.google.cloud.bigquery.storage.v1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) + if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.throttleState != null && 
message.hasOwnProperty("throttleState")) + if (message.throttleState != null && Object.hasOwnProperty.call(message, "throttleState")) $root.google.cloud.bigquery.storage.v1.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); return writer; }; @@ -2510,9 +2510,9 @@ SplitReadStreamRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.fraction != null && message.hasOwnProperty("fraction")) + if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) writer.uint32(/* id 2, wireType 1 =*/17).double(message.fraction); return writer; }; @@ -2720,9 +2720,9 @@ SplitReadStreamResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -2878,7 +2878,7 @@ /** * DataFormat enum. 
* @name google.cloud.bigquery.storage.v1.DataFormat - * @enum {string} + * @enum {number} * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value * @property {number} AVRO=1 AVRO value * @property {number} ARROW=2 ARROW value @@ -3034,21 +3034,21 @@ ReadSession.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && message.hasOwnProperty("expireTime")) + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.table != null && message.hasOwnProperty("table")) + if (message.table != null && Object.hasOwnProperty.call(message, "table")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + if (message.tableModifiers != null 
&& Object.hasOwnProperty.call(message, "tableModifiers")) $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.readOptions != null && message.hasOwnProperty("readOptions")) + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); if (message.streams != null && message.streams.length) for (var i = 0; i < message.streams.length; ++i) @@ -3400,7 +3400,7 @@ TableModifiers.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; @@ -3605,7 +3605,7 @@ if (message.selectedFields != null && message.selectedFields.length) for (var i = 0; i < message.selectedFields.length; ++i) writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); return writer; }; @@ -3821,7 +3821,7 @@ ReadStream.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -4020,7 +4020,7 @@ ArrowSchema.encode = function encode(message, writer) { if (!writer) writer = 
$Writer.create(); - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); return writer; }; @@ -4225,9 +4225,9 @@ ArrowRecordBatch.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; @@ -4449,7 +4449,7 @@ AvroSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.schema != null && message.hasOwnProperty("schema")) + if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); return writer; }; @@ -4645,9 +4645,9 @@ AvroRows.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; @@ -4882,7 +4882,7 @@ if (message.selectedFields != null && 
message.selectedFields.length) for (var i = 0; i < message.selectedFields.length; ++i) writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); return writer; }; @@ -5295,7 +5295,7 @@ Stream.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -5491,9 +5491,9 @@ StreamPosition.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.stream != null && message.hasOwnProperty("stream")) + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.offset != null && message.hasOwnProperty("offset")) + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); return writer; }; @@ -5789,22 +5789,22 @@ ReadSession.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && message.hasOwnProperty("expireTime")) + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if 
(message.streams != null && message.streams.length) for (var i = 0; i < message.streams.length; ++i) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.tableReference != null && message.hasOwnProperty("tableReference")) + if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); return writer; }; @@ -6199,19 +6199,19 @@ CreateReadSessionRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.tableReference != null && message.hasOwnProperty("tableReference")) + if 
(message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); - if (message.readOptions != null && message.hasOwnProperty("readOptions")) + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.format != null && message.hasOwnProperty("format")) + if (message.format != null && Object.hasOwnProperty.call(message, "format")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); return writer; }; @@ -6463,7 +6463,7 @@ /** * DataFormat enum. 
* @name google.cloud.bigquery.storage.v1beta1.DataFormat - * @enum {string} + * @enum {number} * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value * @property {number} AVRO=1 AVRO value * @property {number} ARROW=3 ARROW value @@ -6479,7 +6479,7 @@ /** * ShardingStrategy enum. * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy - * @enum {string} + * @enum {number} * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value * @property {number} LIQUID=1 LIQUID value * @property {number} BALANCED=2 BALANCED value @@ -6548,7 +6548,7 @@ ReadRowsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.readPosition != null && message.hasOwnProperty("readPosition")) + if (message.readPosition != null && Object.hasOwnProperty.call(message, "readPosition")) $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; @@ -6767,13 +6767,13 @@ StreamStatus.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); - if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + if (message.fractionConsumed != null && Object.hasOwnProperty.call(message, "fractionConsumed")) writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); - if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + if (message.isSplittable != null && Object.hasOwnProperty.call(message, "isSplittable")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); - if (message.progress != null && message.hasOwnProperty("progress")) + if 
(message.progress != null && Object.hasOwnProperty.call(message, "progress")) $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -7022,9 +7022,9 @@ Progress.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); return writer; }; @@ -7223,7 +7223,7 @@ ThrottleStatus.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); return writer; }; @@ -7460,15 +7460,15 @@ ReadRowsResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.status != null && message.hasOwnProperty("status")) + if (message.status != null && Object.hasOwnProperty.call(message, "status")) $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.avroRows != null && message.hasOwnProperty("avroRows")) + if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.arrowRecordBatch != null && 
message.hasOwnProperty("arrowRecordBatch")) + if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) + if (message.throttleStatus != null && Object.hasOwnProperty.call(message, "throttleStatus")) $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); return writer; }; @@ -7756,9 +7756,9 @@ BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.session != null && message.hasOwnProperty("session")) + if (message.session != null && Object.hasOwnProperty.call(message, "session")) $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); return writer; }; @@ -8170,7 +8170,7 @@ FinalizeStreamRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.stream != null && message.hasOwnProperty("stream")) + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ 
-8371,9 +8371,9 @@ SplitReadStreamRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.originalStream != null && message.hasOwnProperty("originalStream")) + if (message.originalStream != null && Object.hasOwnProperty.call(message, "originalStream")) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fraction != null && message.hasOwnProperty("fraction")) + if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); return writer; }; @@ -8586,9 +8586,9 @@ SplitReadStreamResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -8815,11 +8815,11 @@ TableReference.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.projectId != null && message.hasOwnProperty("projectId")) + if (message.projectId != null && Object.hasOwnProperty.call(message, "projectId")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); - if (message.datasetId != null && message.hasOwnProperty("datasetId")) + if (message.datasetId != null && Object.hasOwnProperty.call(message, "datasetId")) writer.uint32(/* id 2, 
wireType 2 =*/18).string(message.datasetId); - if (message.tableId != null && message.hasOwnProperty("tableId")) + if (message.tableId != null && Object.hasOwnProperty.call(message, "tableId")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); return writer; }; @@ -9029,7 +9029,7 @@ TableModifiers.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; @@ -9255,7 +9255,7 @@ if (message.rules != null && message.rules.length) for (var i = 0; i < message.rules.length; ++i) $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); return writer; }; @@ -9569,26 +9569,26 @@ HttpRule.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.selector != null && message.hasOwnProperty("selector")) + if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); - if (message.get != null && message.hasOwnProperty("get")) + if (message.get != null && Object.hasOwnProperty.call(message, "get")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); - if (message.put != null && message.hasOwnProperty("put")) + if (message.put != null && Object.hasOwnProperty.call(message, "put")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); 
- if (message.post != null && message.hasOwnProperty("post")) + if (message.post != null && Object.hasOwnProperty.call(message, "post")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); - if (message["delete"] != null && message.hasOwnProperty("delete")) + if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); - if (message.patch != null && message.hasOwnProperty("patch")) + if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); - if (message.body != null && message.hasOwnProperty("body")) + if (message.body != null && Object.hasOwnProperty.call(message, "body")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); - if (message.custom != null && message.hasOwnProperty("custom")) + if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); if (message.additionalBindings != null && message.additionalBindings.length) for (var i = 0; i < message.additionalBindings.length; ++i) $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); - if (message.responseBody != null && message.hasOwnProperty("responseBody")) + if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); return writer; }; @@ -9945,9 +9945,9 @@ CustomHttpPattern.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.kind != null && message.hasOwnProperty("kind")) + if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); - if (message.path != null && message.hasOwnProperty("path")) + if 
(message.path != null && Object.hasOwnProperty.call(message, "path")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); return writer; }; @@ -10093,7 +10093,7 @@ /** * FieldBehavior enum. * @name google.api.FieldBehavior - * @enum {string} + * @enum {number} * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value * @property {number} OPTIONAL=1 OPTIONAL value * @property {number} REQUIRED=2 REQUIRED value @@ -10214,18 +10214,18 @@ ResourceDescriptor.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); if (message.pattern != null && message.pattern.length) for (var i = 0; i < message.pattern.length; ++i) writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); - if (message.nameField != null && message.hasOwnProperty("nameField")) + if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); - if (message.history != null && message.hasOwnProperty("history")) + if (message.history != null && Object.hasOwnProperty.call(message, "history")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); - if (message.plural != null && message.hasOwnProperty("plural")) + if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); - if (message.singular != null && message.hasOwnProperty("singular")) + if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); return writer; }; @@ -10445,7 +10445,7 @@ /** * History enum. 
* @name google.api.ResourceDescriptor.History - * @enum {string} + * @enum {number} * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value @@ -10526,9 +10526,9 @@ ResourceReference.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.childType != null && message.hasOwnProperty("childType")) + if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); return writer; }; @@ -11053,9 +11053,9 @@ FileDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message["package"] != null && message.hasOwnProperty("package")) + if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); if (message.dependency != null && message.dependency.length) for (var i = 0; i < message.dependency.length; ++i) @@ -11072,9 +11072,9 @@ if (message.extension != null && message.extension.length) for (var i = 0; i < message.extension.length; ++i) $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) 
$root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); if (message.publicDependency != null && message.publicDependency.length) for (var i = 0; i < message.publicDependency.length; ++i) @@ -11082,7 +11082,7 @@ if (message.weakDependency != null && message.weakDependency.length) for (var i = 0; i < message.weakDependency.length; ++i) writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); - if (message.syntax != null && message.hasOwnProperty("syntax")) + if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); return writer; }; @@ -11620,7 +11620,7 @@ DescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.field != null && message.field.length) for (var i = 0; i < message.field.length; ++i) @@ -11637,7 +11637,7 @@ if (message.extension != null && message.extension.length) for (var i = 0; i < message.extension.length; ++i) $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); if (message.oneofDecl != null && 
message.oneofDecl.length) for (var i = 0; i < message.oneofDecl.length; ++i) @@ -12102,11 +12102,11 @@ ExtensionRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -12330,9 +12330,9 @@ ReservedRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); return writer; }; @@ -12823,25 +12823,25 @@ FieldDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.extendee != null && message.hasOwnProperty("extendee")) + if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); - if 
(message.number != null && message.hasOwnProperty("number")) + if (message.number != null && Object.hasOwnProperty.call(message, "number")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); - if (message.label != null && message.hasOwnProperty("label")) + if (message.label != null && Object.hasOwnProperty.call(message, "label")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); - if (message.typeName != null && message.hasOwnProperty("typeName")) + if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); - if (message.jsonName != null && message.hasOwnProperty("jsonName")) + if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); return writer; }; @@ -13188,7 +13188,7 @@ /** * Type enum. 
* @name google.protobuf.FieldDescriptorProto.Type - * @enum {string} + * @enum {number} * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value * @property {number} TYPE_INT64=3 TYPE_INT64 value @@ -13234,7 +13234,7 @@ /** * Label enum. * @name google.protobuf.FieldDescriptorProto.Label - * @enum {string} + * @enum {number} * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value @@ -13315,9 +13315,9 @@ OneofDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -13560,12 +13560,12 @@ EnumDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.value != null && message.value.length) for (var i = 0; i < message.value.length; ++i) $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if 
(message.reservedRange != null && message.reservedRange.length) for (var i = 0; i < message.reservedRange.length; ++i) @@ -13868,9 +13868,9 @@ EnumReservedRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); return writer; }; @@ -14090,11 +14090,11 @@ EnumValueDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.number != null && message.hasOwnProperty("number")) + if (message.number != null && Object.hasOwnProperty.call(message, "number")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -14328,12 +14328,12 @@ ServiceDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.method != null && message.method.length) for (var i = 0; i < message.method.length; ++i) $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], 
writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -14613,17 +14613,17 @@ MethodDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.inputType != null && message.hasOwnProperty("inputType")) + if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); - if (message.outputType != null && message.hasOwnProperty("outputType")) + if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); return writer; }; @@ -15062,45 +15062,45 @@ FileOptions.encode = function encode(message, 
writer) { if (!writer) writer = $Writer.create(); - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); - if (message.goPackage != null && message.hasOwnProperty("goPackage")) + if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) writer.uint32(/* id 17, wireType 0 =*/136).bool(message.javaGenericServices); - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, 
"pyGenericServices")) writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + if (message.javaGenerateEqualsAndHash != null && Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + if 
(message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + if (message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -15527,7 +15527,7 @@ /** * OptimizeMode enum. 
* @name google.protobuf.FileOptions.OptimizeMode - * @enum {string} + * @enum {number} * @property {number} SPEED=1 SPEED value * @property {number} CODE_SIZE=2 CODE_SIZE value * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value @@ -15645,18 +15645,18 @@ MessageOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, ".google.api.resource")) $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 
2 =*/8426).fork()).ldelim(); return writer; }; @@ -15998,17 +15998,17 @@ FieldOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.ctype != null && message.hasOwnProperty("ctype")) + if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); - if (message.packed != null && message.hasOwnProperty("packed")) + if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.lazy != null && message.hasOwnProperty("lazy")) + if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); - if (message.jstype != null && message.hasOwnProperty("jstype")) + if (message.jstype != null && Object.hasOwnProperty.call(message, "jstype")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); - if (message.weak != null && message.hasOwnProperty("weak")) + if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -16019,7 +16019,7 @@ writer.int32(message[".google.api.fieldBehavior"][i]); writer.ldelim(); } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 
1055, wireType 2 =*/8442).fork()).ldelim(); return writer; }; @@ -16355,7 +16355,7 @@ /** * CType enum. * @name google.protobuf.FieldOptions.CType - * @enum {string} + * @enum {number} * @property {number} STRING=0 STRING value * @property {number} CORD=1 CORD value * @property {number} STRING_PIECE=2 STRING_PIECE value @@ -16371,7 +16371,7 @@ /** * JSType enum. * @name google.protobuf.FieldOptions.JSType - * @enum {string} + * @enum {number} * @property {number} JS_NORMAL=0 JS_NORMAL value * @property {number} JS_STRING=1 JS_STRING value * @property {number} JS_NUMBER=2 JS_NUMBER value @@ -16670,9 +16670,9 @@ EnumOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -16915,7 +16915,7 @@ EnumValueOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -17164,14 +17164,14 @@ ServiceOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && 
message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); return writer; }; @@ -17450,9 +17450,9 @@ MethodOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -17460,7 +17460,7 @@ if (message[".google.api.methodSignature"] != null && message[".google.api.methodSignature"].length) for 
(var i = 0; i < message[".google.api.methodSignature"].length; ++i) writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); return writer; }; @@ -17694,7 +17694,7 @@ /** * IdempotencyLevel enum. * @name google.protobuf.MethodOptions.IdempotencyLevel - * @enum {string} + * @enum {number} * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value * @property {number} IDEMPOTENT=2 IDEMPOTENT value @@ -17824,17 +17824,17 @@ if (message.name != null && message.name.length) for (var i = 0; i < message.name.length; ++i) $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + if (message.doubleValue != null && 
Object.hasOwnProperty.call(message, "doubleValue")) writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); - if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) writer.uint32(/* id 7, wireType 2 =*/58).bytes(message.stringValue); - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); return writer; }; @@ -18611,9 +18611,9 @@ writer.int32(message.span[i]); writer.ldelim(); } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) for (var i = 0; i < message.leadingDetachedComments.length; ++i) @@ -19144,11 +19144,11 @@ writer.int32(message.path[i]); writer.ldelim(); } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); - if (message.begin != null && message.hasOwnProperty("begin")) + if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, 
"end")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); return writer; }; @@ -19401,9 +19401,9 @@ Timestamp.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.seconds != null && message.hasOwnProperty("seconds")) + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && message.hasOwnProperty("nanos")) + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); return writer; }; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index dc56cff4f20..19a48798bee 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "aa16f9c028724dbab16ece815408771a36935dee" + "sha": "492650446385640eeec9036f9252b3b88ea49d53" } }, { From 95e068a1ab9cfd8292a980122658a052f117054a Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Thu, 23 Apr 2020 19:35:01 -0700 Subject: [PATCH 039/333] chore: update npm scripts and synth.py (#62) Update npm scripts: add clean, prelint, prefix; make sure that lint and fix are set properly. Use post-process feature of synthtool. 
--- handwritten/bigquery-storage/package.json | 5 +++-- handwritten/bigquery-storage/synth.py | 7 ++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c430768b059..fd1d33664cc 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -19,11 +19,12 @@ "docs-test": "linkinator docs", "fix": "gts fix", "prelint": "cd samples; npm link ../; npm install", - "lint": "gts fix", + "lint": "gts check", "prepare": "npm run compile-protos && npm run compile", "system-test": "c8 mocha build/system-test", "test": "c8 mocha build/test", - "samples-test": "cd samples/ && npm link ../ && npm test && cd ../" + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "precompile": "gts clean" }, "dependencies": { "google-gax": "^2.1.0" diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index 24ecbba9777..01ba810400a 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -14,7 +14,7 @@ """This script is used to synthesize generated parts of this library.""" import synthtool as s import synthtool.gcp as gcp -import subprocess +import synthtool.languages.node as node import logging logging.basicConfig(level=logging.DEBUG) @@ -39,7 +39,4 @@ common_templates = gcp.CommonTemplates() templates = common_templates.node_library(source_location='build/src') s.copy(templates, excludes=[]) -# Node.js specific cleanup -subprocess.run(['npm', 'install']) -subprocess.run(['npm', 'run', 'fix']) -subprocess.run(['npx', 'compileProtos', 'src']) \ No newline at end of file +node.postprocess_gapic_library() From 906db7cd4d128b9da5eb5e66618ee68ced145f27 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Tue, 5 May 2020 19:38:07 -0700 Subject: [PATCH 040/333] fix: regen protos and tests (#63) --- handwritten/bigquery-storage/synth.metadata | 14 
+++-------- .../test/gapic_big_query_read_v1.ts | 12 +++------- .../test/gapic_big_query_storage_v1beta1.ts | 23 +++++++------------ 3 files changed, 14 insertions(+), 35 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 19a48798bee..b20225c0063 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,23 +3,15 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "492650446385640eeec9036f9252b3b88ea49d53" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "42ee97c1b93a0e3759bbba3013da309f670a90ab", - "internalRef": "307114445" + "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", + "sha": "775dd910ebac4fcc62f728c2d354ef503fb757f7" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "19465d3ec5e5acdb01521d8f3bddd311bcbee28d" + "sha": "ab883569eb0257bbf16a6d825fd018b3adde3912" } } ], diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 8e8aee52a2b..bac7067ae3c 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -260,9 +260,7 @@ describe('v1.BigQueryReadClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await client.createReadSession(request); - }, expectedError); + await assert.rejects(client.createReadSession(request), expectedError); assert( (client.innerApiCalls.createReadSession as SinonStub) .getCall(0) @@ -374,9 +372,7 @@ describe('v1.BigQueryReadClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await client.splitReadStream(request); - }, expectedError); + await 
assert.rejects(client.splitReadStream(request), expectedError); assert( (client.innerApiCalls.splitReadStream as SinonStub) .getCall(0) @@ -468,9 +464,7 @@ describe('v1.BigQueryReadClient', () => { reject(err); }); }); - await assert.rejects(async () => { - await promise; - }, expectedError); + await assert.rejects(promise, expectedError); assert( (client.innerApiCalls.readRows as SinonStub) .getCall(0) diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 7321fc43ee4..e122cb23bd2 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -271,9 +271,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await client.createReadSession(request); - }, expectedError); + await assert.rejects(client.createReadSession(request), expectedError); assert( (client.innerApiCalls.createReadSession as SinonStub) .getCall(0) @@ -390,9 +388,10 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await client.batchCreateReadSessionStreams(request); - }, expectedError); + await assert.rejects( + client.batchCreateReadSessionStreams(request), + expectedError + ); assert( (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) .getCall(0) @@ -507,9 +506,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await client.finalizeStream(request); - }, expectedError); + await assert.rejects(client.finalizeStream(request), expectedError); assert( (client.innerApiCalls.finalizeStream as SinonStub) .getCall(0) @@ -624,9 +621,7 @@ describe('v1beta1.BigQueryStorageClient', () => { undefined, expectedError ); - await assert.rejects(async () => { - await 
client.splitReadStream(request); - }, expectedError); + await assert.rejects(client.splitReadStream(request), expectedError); assert( (client.innerApiCalls.splitReadStream as SinonStub) .getCall(0) @@ -722,9 +717,7 @@ describe('v1beta1.BigQueryStorageClient', () => { reject(err); }); }); - await assert.rejects(async () => { - await promise; - }, expectedError); + await assert.rejects(promise, expectedError); assert( (client.innerApiCalls.readRows as SinonStub) .getCall(0) From a813370103fac045f84a825516c439d0b65dc299 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 6 May 2020 12:00:07 -0700 Subject: [PATCH 041/333] feat: additional type annotation (#64) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/d5e01c0b-0d2d-4da4-b495-86674ea6021f/targets --- .../bigquery/storage/v1beta1/arrow.proto | 3 +-- .../cloud/bigquery/storage/v1beta1/avro.proto | 3 +-- .../storage/v1beta1/read_options.proto | 3 +-- .../bigquery/storage/v1beta1/storage.proto | 21 ++++++++++--------- .../storage/v1beta1/table_reference.proto | 3 +-- .../bigquery-storage/protos/protos.json | 13 +++++++++--- .../src/v1beta1/big_query_storage_client.ts | 4 ++-- handwritten/bigquery-storage/synth.metadata | 12 +++++++++-- 8 files changed, 37 insertions(+), 25 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto index 3003de444c2..f70c61c7246 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto index 021d8e44f9f..7d034a28a7e 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 8ed9b73f6cf..1ff8d8b5eb6 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 22f742fbb65..81e77c73af5 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -204,7 +203,12 @@ message CreateReadSessionRequest { // Required. String of the form `projects/{project_id}` indicating the // project this ReadSession is associated with. This is the project that will // be billed for usage. - string parent = 6 [(google.api.field_behavior) = REQUIRED]; + string parent = 6 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Any modifiers to the Table (e.g. snapshot timestamp). TableModifiers table_modifiers = 2; @@ -286,9 +290,6 @@ message StreamStatus { float fraction_consumed = 2; // Represents the progress of the current stream. - // - // Note: This value is under development and should not be used. Use - // `fraction_consumed` instead. Progress progress = 4; // Whether this stream can be split. For sessions that use the LIQUID sharding @@ -373,14 +374,14 @@ message BatchCreateReadSessionStreamsResponse { // Request information for invoking `FinalizeStream`. message FinalizeStreamRequest { - // Stream to finalize. - Stream stream = 2; + // Required. Stream to finalize. 
+ Stream stream = 2 [(google.api.field_behavior) = REQUIRED]; } // Request information for `SplitReadStream`. message SplitReadStreamRequest { - // Stream to split. - Stream original_stream = 1; + // Required. Stream to split. + Stream original_stream = 1 [(google.api.field_behavior) = REQUIRED]; // A value in the range (0.0, 1.0) that specifies the fractional point at // which the original stream should be split. The actual split point is diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index a55dc48eb02..4269392f676 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index b14e1b22555..434e158d5fe 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -550,7 +550,8 @@ "type": "string", "id": 6, "options": { - "(google.api.field_behavior)": "REQUIRED" + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" } }, "tableModifiers": { @@ -703,7 +704,10 @@ "fields": { "stream": { "type": "Stream", - "id": 2 + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } } } }, @@ -711,7 +715,10 @@ "fields": { "originalStream": { "type": "Stream", - "id": 1 + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } }, "fraction": { "type": "float", diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 89d815f9e06..5e8f75d4414 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -585,7 +585,7 @@ export class BigQueryStorageClient { * @param {Object} request * The request object that will be sent. * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream - * Stream to finalize. + * Required. Stream to finalize. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. @@ -691,7 +691,7 @@ export class BigQueryStorageClient { * @param {Object} request * The request object that will be sent. * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.originalStream - * Stream to split. + * Required. Stream to split. 
* @param {number} request.fraction * A value in the range (0.0, 1.0) that specifies the fractional point at * which the original stream should be split. The actual split point is diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index b20225c0063..7dbe8b175bc 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,8 +3,16 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", - "sha": "775dd910ebac4fcc62f728c2d354ef503fb757f7" + "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", + "sha": "6293832961eedcdd57c24edc311f2c154781e34e" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "a3a0bf0f6291d69f2ff3df7fcd63d28ee20ac727", + "internalRef": "310060413" } }, { From 3e34d6945aa0984fe848a6a142c1a82d26cebc5c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 18 May 2020 09:58:07 -0700 Subject: [PATCH 042/333] build: do not fail builds on codecov errors (#528) (#66) This PR was generated using Autosynth. 
:rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/74eabfaf-a964-4a5b-b26c-c28d4ae22df0/targets Source-Link: https://github.com/googleapis/synthtool/commit/be74d3e532faa47eb59f1a0eaebde0860d1d8ab4 --- handwritten/bigquery-storage/synth.metadata | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7dbe8b175bc..f76b8feb32a 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "6293832961eedcdd57c24edc311f2c154781e34e" + "sha": "2d76c0e16abedfaf106db063dc00f79e38166dad" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ab883569eb0257bbf16a6d825fd018b3adde3912" + "sha": "be74d3e532faa47eb59f1a0eaebde0860d1d8ab4" } } ], From 8d09cd5477db86e080a806495933b58e5386a071 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Jun 2020 14:52:08 -0700 Subject: [PATCH 043/333] chore: release 2.0.0 (#40) * updated CHANGELOG.md [ci skip] * updated package.json [ci skip] * updated samples/package.json [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 19 +++++++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index fee320ce64d..3070a86abbd 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [2.0.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.1.0...v2.0.0) 
(2020-05-18) + + +### ⚠ BREAKING CHANGES + +* The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM. + +### Features + +* add V1 client ([#28](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/28)) ([da10a33](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/da10a33ee30a6fa0b447ef16c8b755e3ac05a87c)) +* additional type annotation ([#64](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/64)) ([2d76c0e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2d76c0e16abedfaf106db063dc00f79e38166dad)) +* drop node8 support ([#39](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/39)) ([2f66ded](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2f66ded8db03f71d3f2b37a1d91e4f3f232d5eaf)) + + +### Bug Fixes + +* regen protos and tests ([#63](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/63)) ([6293832](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6293832961eedcdd57c24edc311f2c154781e34e)) +* remove eslint, update gax, fix generated protos, run the generator ([#49](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/49)) ([b5b9492](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b5b9492a0c4b86b868a2b33c5c350301db29cc65)) + ## [1.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.0.0...v1.1.0) (2020-03-06) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index fd1d33664cc..a9f57c51615 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "1.1.0", + "version": "2.0.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From f480c33f5784814db9a48831892b96ddf7dd583b Mon Sep 17 00:00:00 2001 From: Yoshi 
Automation Bot Date: Wed, 3 Jun 2020 18:13:42 -0700 Subject: [PATCH 044/333] chore: update protos.js --- .../bigquery-storage/protos/protos.d.ts | 6 ++++ handwritten/bigquery-storage/protos/protos.js | 28 +++++++++++++++++-- .../bigquery-storage/protos/protos.json | 6 +++- handwritten/bigquery-storage/synth.metadata | 2 +- 4 files changed, 37 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 33f0b578737..7d93ddb63d4 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -5223,6 +5223,9 @@ export namespace google { /** FieldDescriptorProto options */ options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); } /** Represents a FieldDescriptorProto. */ @@ -5264,6 +5267,9 @@ export namespace google { /** FieldDescriptorProto options. */ public options?: (google.protobuf.IFieldOptions|null); + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + /** * Creates a new FieldDescriptorProto instance using the specified properties. 
* @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index d1ff86ad210..5df38a02d53 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -28,7 +28,7 @@ var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; // Exported root namespace - var $root = $protobuf.roots._google_cloud_bigquery_storage_1_1_0_protos || ($protobuf.roots._google_cloud_bigquery_storage_1_1_0_protos = {}); + var $root = $protobuf.roots._google_cloud_bigquery_storage_2_0_0_protos || ($protobuf.roots._google_cloud_bigquery_storage_2_0_0_protos = {}); $root.google = (function() { @@ -12702,6 +12702,7 @@ * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex * @property {string|null} [jsonName] FieldDescriptorProto jsonName * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options + * @property {boolean|null} [proto3Optional] FieldDescriptorProto proto3Optional */ /** @@ -12799,6 +12800,14 @@ */ FieldDescriptorProto.prototype.options = null; + /** + * FieldDescriptorProto proto3Optional. + * @member {boolean} proto3Optional + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.proto3Optional = false; + /** * Creates a new FieldDescriptorProto instance using the specified properties. 
* @function create @@ -12843,6 +12852,8 @@ writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); + if (message.proto3Optional != null && Object.hasOwnProperty.call(message, "proto3Optional")) + writer.uint32(/* id 17, wireType 0 =*/136).bool(message.proto3Optional); return writer; }; @@ -12907,6 +12918,9 @@ case 8: message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); break; + case 17: + message.proto3Optional = reader.bool(); + break; default: reader.skipType(tag & 7); break; @@ -13001,6 +13015,9 @@ if (error) return "options." + error; } + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + if (typeof message.proto3Optional !== "boolean") + return "proto3Optional: boolean expected"; return null; }; @@ -13123,6 +13140,8 @@ throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); } + if (object.proto3Optional != null) + message.proto3Optional = Boolean(object.proto3Optional); return message; }; @@ -13150,6 +13169,7 @@ object.options = null; object.oneofIndex = 0; object.jsonName = ""; + object.proto3Optional = false; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -13171,6 +13191,8 @@ object.oneofIndex = message.oneofIndex; if (message.jsonName != null && message.hasOwnProperty("jsonName")) object.jsonName = message.jsonName; + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + object.proto3Optional = message.proto3Optional; return object; }; @@ -14964,7 +14986,7 @@ * @memberof google.protobuf.FileOptions * @instance */ - FileOptions.prototype.ccEnableArenas = false; + FileOptions.prototype.ccEnableArenas = true; /** * FileOptions objcClassPrefix. 
@@ -15450,7 +15472,7 @@ object.javaGenerateEqualsAndHash = false; object.deprecated = false; object.javaStringCheckUtf8 = false; - object.ccEnableArenas = false; + object.ccEnableArenas = true; object.objcClassPrefix = ""; object.csharpNamespace = ""; object.swiftPrefix = ""; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 434e158d5fe..ca83026b195 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1190,6 +1190,10 @@ "options": { "type": "FieldOptions", "id": 8 + }, + "proto3Optional": { + "type": "bool", + "id": 17 } }, "nested": { @@ -1425,7 +1429,7 @@ "type": "bool", "id": 31, "options": { - "default": false + "default": true } }, "objcClassPrefix": { diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index f76b8feb32a..86e7c8d5dee 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "2d76c0e16abedfaf106db063dc00f79e38166dad" + "sha": "a83dff661119fc6dc667afffcf984694fe46ee73" } }, { From b8e833bca6efbb67587948504e2aec98a21130cc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 11 Jun 2020 17:40:24 -0700 Subject: [PATCH 045/333] feat(secrets): begin migration to secret manager from keystore (#70) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/8d5e906d-0de4-4e28-b374-7d5fd4a1ce62/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/1c92077459db3dc50741e878f98b08c6261181e0 --- .../.kokoro/populate-secrets.sh | 32 +++++++++++++++++++ .../bigquery-storage/.kokoro/publish.sh | 2 +- .../.kokoro/release/publish.cfg | 10 ++---- .../bigquery-storage/.kokoro/trampoline.sh | 1 + handwritten/bigquery-storage/protos/protos.js | 2 +- .../src/v1/big_query_read_client.ts | 7 ++++ .../src/v1beta1/big_query_storage_client.ts | 7 ++++ handwritten/bigquery-storage/synth.metadata | 4 +-- handwritten/bigquery-storage/tsconfig.json | 2 +- 9 files changed, 55 insertions(+), 12 deletions(-) create mode 100755 handwritten/bigquery-storage/.kokoro/populate-secrets.sh diff --git a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh new file mode 100755 index 00000000000..85801f403e2 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --credential-file-override=${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json \ + --project cloud-devrel-kokoro-resources \ + --secret $key > \ + "$SECRET_LOCATION/$key" +done diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index ff9c262295a..24957d71def 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -24,7 +24,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / cd $(dirname $0)/.. -NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-bigquery-storage-npm-token) +NPM_TOKEN=$(cat $KOKORO_GFILE_DIR/secret_manager/npm_publish_token echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc npm install diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 5531834c7ba..948aefb0856 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -47,13 +47,9 @@ before_action { } } -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-bigquery-storage-npm-token" - } - } +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "npm_publish_token" } # Download trampoline resources. 
diff --git a/handwritten/bigquery-storage/.kokoro/trampoline.sh b/handwritten/bigquery-storage/.kokoro/trampoline.sh index 9bd4905c4b5..a4241db23f4 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline.sh @@ -24,4 +24,5 @@ function cleanup() { } trap cleanup EXIT +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 5df38a02d53..faf2f6c3d8b 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -28,7 +28,7 @@ var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; // Exported root namespace - var $root = $protobuf.roots._google_cloud_bigquery_storage_2_0_0_protos || ($protobuf.roots._google_cloud_bigquery_storage_2_0_0_protos = {}); + var $root = $protobuf.roots._google_cloud_bigquery_storage_protos || ($protobuf.roots._google_cloud_bigquery_storage_protos = {}); $root.google = (function() { diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index ba031a72a85..622628516d6 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -92,6 +92,13 @@ export class BigQueryReadClient { } opts.servicePath = opts.servicePath || servicePath; opts.port = opts.port || port; + + // users can override the config from client side, like retry codes name. 
+ // The detailed structure of the clientConfig can be found here: https://github.com/googleapis/gax-nodejs/blob/master/src/gax.ts#L546 + // The way to override client config for Showcase API: + // + // const customConfig = {"interfaces": {"google.showcase.v1beta1.Echo": {"methods": {"Echo": {"retry_codes_name": "idempotent", "retry_params_name": "default"}}}}} + // const showcaseClient = new showcaseClient({ projectId, customConfig }); opts.clientConfig = opts.clientConfig || {}; const isBrowser = typeof window !== 'undefined'; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 5e8f75d4414..e24f7d9a7e8 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -92,6 +92,13 @@ export class BigQueryStorageClient { } opts.servicePath = opts.servicePath || servicePath; opts.port = opts.port || port; + + // users can override the config from client side, like retry codes name. 
+ // The detailed structure of the clientConfig can be found here: https://github.com/googleapis/gax-nodejs/blob/master/src/gax.ts#L546 + // The way to override client config for Showcase API: + // + // const customConfig = {"interfaces": {"google.showcase.v1beta1.Echo": {"methods": {"Echo": {"retry_codes_name": "idempotent", "retry_params_name": "default"}}}}} + // const showcaseClient = new showcaseClient({ projectId, customConfig }); opts.clientConfig = opts.clientConfig || {}; const isBrowser = typeof window !== 'undefined'; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 86e7c8d5dee..140b12c58f2 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "a83dff661119fc6dc667afffcf984694fe46ee73" + "sha": "20d9978cf09493d982bde1914f4ba1ef6fc21711" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "be74d3e532faa47eb59f1a0eaebde0860d1d8ab4" + "sha": "1c92077459db3dc50741e878f98b08c6261181e0" } } ], diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json index 613d35597b5..c78f1c884ef 100644 --- a/handwritten/bigquery-storage/tsconfig.json +++ b/handwritten/bigquery-storage/tsconfig.json @@ -5,7 +5,7 @@ "outDir": "build", "resolveJsonModule": true, "lib": [ - "es2016", + "es2018", "dom" ] }, From d92389758278fc7895ee97e1e014f162137cea41 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 12 Jun 2020 11:05:03 -0700 Subject: [PATCH 046/333] fix: handle fallback option properly (#73) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore(nodejs_templates): add script logging to node_library populate-secrets.sh Co-authored-by: Benjamin E. Coe Source-Author: BenWhitehead Source-Date: Wed Jun 10 22:24:28 2020 -0400 Source-Repo: googleapis/synthtool Source-Sha: e7034945fbdc0e79d3c57f6e299e5c90b0f11469 Source-Link: https://github.com/googleapis/synthtool/commit/e7034945fbdc0e79d3c57f6e299e5c90b0f11469 --- .../bigquery-storage/.kokoro/populate-secrets.sh | 12 ++++++++++++ .../src/v1/big_query_read_client.ts | 13 +++++-------- .../src/v1beta1/big_query_storage_client.ts | 13 +++++-------- handwritten/bigquery-storage/synth.metadata | 4 ++-- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh index 85801f403e2..e6ce8200d75 100755 --- a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh +++ b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh @@ -15,12 +15,19 @@ set -eo pipefail +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + # Populates requested secrets set in SECRET_MANAGER_KEYS from service account: # kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" mkdir -p ${SECRET_LOCATION} for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") do + msg "Retrieving secret ${key}" docker run --entrypoint=gcloud \ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ gcr.io/google.com/cloudsdktool/cloud-sdk \ @@ -29,4 +36,9 @@ do --project cloud-devrel-kokoro-resources \ --secret $key > \ "$SECRET_LOCATION/$key" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi done diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 622628516d6..aca62fdcaca 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -101,14 +101,11 @@ export class BigQueryReadClient { // const showcaseClient = new showcaseClient({ projectId, customConfig }); opts.clientConfig = opts.clientConfig || {}; - const isBrowser = typeof window !== 'undefined'; - if (isBrowser) { - opts.fallback = true; - } - // If we are in browser, we are already using fallback because of the - // "browser" field in package.json. - // But if we were explicitly requested to use fallback, let's do it now. - this._gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; + // If we're running in browser, it's OK to omit `fallback` since + // google-gax has `browser` field in its `package.json`. + // For Electron (which does not respect `browser` field), + // pass `{fallback: true}` to the BigQueryReadClient constructor. + this._gaxModule = opts.fallback ? gax.fallback : gax; // Create a `gaxGrpc` object, with any grpc-specific options // sent to the client. 
diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index e24f7d9a7e8..0bad12e8eab 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -101,14 +101,11 @@ export class BigQueryStorageClient { // const showcaseClient = new showcaseClient({ projectId, customConfig }); opts.clientConfig = opts.clientConfig || {}; - const isBrowser = typeof window !== 'undefined'; - if (isBrowser) { - opts.fallback = true; - } - // If we are in browser, we are already using fallback because of the - // "browser" field in package.json. - // But if we were explicitly requested to use fallback, let's do it now. - this._gaxModule = !isBrowser && opts.fallback ? gax.fallback : gax; + // If we're running in browser, it's OK to omit `fallback` since + // google-gax has `browser` field in its `package.json`. + // For Electron (which does not respect `browser` field), + // pass `{fallback: true}` to the BigQueryStorageClient constructor. + this._gaxModule = opts.fallback ? gax.fallback : gax; // Create a `gaxGrpc` object, with any grpc-specific options // sent to the client. 
diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 140b12c58f2..3b53e6abd39 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "20d9978cf09493d982bde1914f4ba1ef6fc21711" + "sha": "6513e8cf6195740b570b39fb645d8a1adafc0580" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "1c92077459db3dc50741e878f98b08c6261181e0" + "sha": "e7034945fbdc0e79d3c57f6e299e5c90b0f11469" } } ], From 9161cc1d62fc163e78d91805228e5797a2eae315 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Jun 2020 20:40:50 +0200 Subject: [PATCH 047/333] chore(deps): update dependency mocha to v8 (#71) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a9f57c51615..b93033a76ca 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -39,7 +39,7 @@ "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", "linkinator": "^2.0.1", - "mocha": "^7.0.1", + "mocha": "^8.0.0", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^9.0.1", From e6a55d09f8add6d05a05a238bcc235db7b8162ef Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Jun 2020 18:46:08 +0000 Subject: [PATCH 048/333] chore: release 2.1.0 (#72) :robot: I have created a release \*beep\* \*boop\* --- ## [2.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.0.0...v2.1.0) (2020-06-12) ### Features * **secrets:** begin migration to secret manager from keystore ([#70](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/70)) 
([6513e8c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6513e8cf6195740b570b39fb645d8a1adafc0580)) ### Bug Fixes * handle fallback option properly ([#73](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/73)) ([ec6b88c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ec6b88cf87bf45e0f16935b8b27f15447aa385b9)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 12 ++++++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 3070a86abbd..0a782d3f34a 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [2.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.0.0...v2.1.0) (2020-06-12) + + +### Features + +* **secrets:** begin migration to secret manager from keystore ([#70](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/70)) ([6513e8c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6513e8cf6195740b570b39fb645d8a1adafc0580)) + + +### Bug Fixes + +* handle fallback option properly ([#73](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/73)) ([ec6b88c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ec6b88cf87bf45e0f16935b8b27f15447aa385b9)) + ## [2.0.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.1.0...v2.0.0) (2020-05-18) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index b93033a76ca..734405b3615 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.0.0", + "version": "2.1.0", "description": "Client for the 
BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 6e2e1d67af2e908f218cd7625a2ab2fbac6ace43 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Fri, 19 Jun 2020 13:13:20 -0700 Subject: [PATCH 049/333] feat: promote library to GA (#75) --- .../.github/ISSUE_TEMPLATE/bug_report.md | 11 ++++++++--- handwritten/bigquery-storage/.repo-metadata.json | 2 +- handwritten/bigquery-storage/README.md | 11 ++++++----- handwritten/bigquery-storage/synth.metadata | 10 +++++----- 4 files changed, 20 insertions(+), 14 deletions(-) diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md index 5adacf4591c..3902f23b2d7 100644 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,13 +8,18 @@ Thanks for stopping by to let us know something could be better! **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. -Please run down the following list and make sure you've tried the usual "quick fixes": +1) Is this a client library issue or a product issue? +This is the client library for . We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. +2) Did someone already solve this? 
- Search the issues already opened: https://github.com/googleapis/nodejs-bigquery-storage/issues - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js + - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js -If you are still having issues, please be sure to include as much information as possible: +3) Do you have a support contract? +Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. + +If the support paths suggested above still do not result in a resolution, please provide the following details. #### Environment details diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index 09762cf5274..7ae251898a0 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -4,7 +4,7 @@ "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", "issue_tracker": "https://b.corp.google.com/savedsearches/559654", - "release_level": "beta", + "release_level": "ga", "language": "nodejs", "repo": "googleapis/nodejs-bigquery-storage", "distribution_name": "@google-cloud/bigquery-storage", diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 6e66eb377d7..bae285ff153 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -4,7 +4,7 @@ # [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/nodejs-bigquery-storage) -[![release level](https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![release 
level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) [![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) [![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery-storage/master.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery-storage) @@ -219,11 +219,12 @@ _Legacy Node.js versions are supported as a best effort:_ This library follows [Semantic Versioning](http://semver.org/). +This library is considered to be **General Availability (GA)**. This means it +is stable; the code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **GA** libraries +are addressed with the highest priority. -This library is considered to be in **beta**. This means it is expected to be -mostly stable while we work toward a general availability release; however, -complete stability is not guaranteed. We will address issues and requests -against beta libraries with a high priority. 
diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 3b53e6abd39..c6765e428d7 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,23 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "6513e8cf6195740b570b39fb645d8a1adafc0580" + "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", + "sha": "a6c5bf05da2d751dc3c656d387ac983455b24781" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a3a0bf0f6291d69f2ff3df7fcd63d28ee20ac727", - "internalRef": "310060413" + "sha": "eb37e688331443969eed9b969531751154a956d5", + "internalRef": "317130948" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "e7034945fbdc0e79d3c57f6e299e5c90b0f11469" + "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" } } ], From f16470785bc2b28425b8580e78073581b7897fb6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sun, 21 Jun 2020 14:28:37 -0700 Subject: [PATCH 050/333] chore: release 2.2.0 (#78) * updated CHANGELOG.md [ci skip] * updated package.json [ci skip] * updated samples/package.json Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 0a782d3f34a..c37dd9bf6c8 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.2.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.1.0...v2.2.0) (2020-06-19) + + +### Features + +* promote library to GA 
([#75](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/75)) ([7d7a67e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/7d7a67e9198e87cdcc4911d9505a121f1a1d9549)) + ## [2.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.0.0...v2.1.0) (2020-06-12) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 734405b3615..aa9ebc44f91 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.1.0", + "version": "2.2.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From c036ebd47d8f432e33ff308450b457e41555eb80 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Mon, 22 Jun 2020 10:16:30 -0700 Subject: [PATCH 051/333] chore: add blunderbuss config (#77) --- handwritten/bigquery-storage/.github/blunderbuss.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 handwritten/bigquery-storage/.github/blunderbuss.yaml diff --git a/handwritten/bigquery-storage/.github/blunderbuss.yaml b/handwritten/bigquery-storage/.github/blunderbuss.yaml new file mode 100644 index 00000000000..b5a53f68cc0 --- /dev/null +++ b/handwritten/bigquery-storage/.github/blunderbuss.yaml @@ -0,0 +1,8 @@ +assign_issues: + - sofisl + - bcoe + - steffnay +assign_prs: + - sofisl + - bcoe + - steffnay From f3de7c15fe513a974b737240f05eff2744b3ae53 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sat, 27 Jun 2020 17:48:06 -0700 Subject: [PATCH 052/333] build: add config .gitattributes (#80) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/2a81bca4-7abd-4108-ac1f-21340f858709/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/dc9caca650c77b7039e2bbc3339ffb34ae78e5b7 --- handwritten/bigquery-storage/.gitattributes | 3 +++ handwritten/bigquery-storage/.kokoro/.gitattributes | 1 + handwritten/bigquery-storage/synth.metadata | 6 +++--- 3 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 handwritten/bigquery-storage/.gitattributes create mode 100644 handwritten/bigquery-storage/.kokoro/.gitattributes diff --git a/handwritten/bigquery-storage/.gitattributes b/handwritten/bigquery-storage/.gitattributes new file mode 100644 index 00000000000..2e63216ae9c --- /dev/null +++ b/handwritten/bigquery-storage/.gitattributes @@ -0,0 +1,3 @@ +*.ts text eol=lf +*.js test eol=lf +protos/* linguist-generated diff --git a/handwritten/bigquery-storage/.kokoro/.gitattributes b/handwritten/bigquery-storage/.kokoro/.gitattributes new file mode 100644 index 00000000000..87acd4f484e --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/.gitattributes @@ -0,0 +1 @@ +* linguist-generated=true diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index c6765e428d7..b9498181ba3 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,8 +3,8 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", - "sha": "a6c5bf05da2d751dc3c656d387ac983455b24781" + "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", + "sha": "2c4f58676e3fdb4bbf1d565d087e44587a854e7f" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" + "sha": "dc9caca650c77b7039e2bbc3339ffb34ae78e5b7" } } ], From 2c33d7e44bb04136aa5e583811bf3904885d14c1 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Mon, 6 Jul 2020 10:17:39 -0700 Subject: [PATCH 053/333] build: use bazel build (#81) --- 
.../src/v1/big_query_read_client.ts | 26 +++++++++++++ .../src/v1beta1/big_query_storage_client.ts | 26 +++++++++++++ handwritten/bigquery-storage/synth.metadata | 26 +++++-------- handwritten/bigquery-storage/synth.py | 14 ++----- .../test/gapic_big_query_read_v1.ts | 38 +++++++++++++++++++ .../test/gapic_big_query_storage_v1beta1.ts | 38 +++++++++++++++++++ 6 files changed, 140 insertions(+), 28 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index aca62fdcaca..7d054605a41 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -153,6 +153,9 @@ export class BigQueryReadClient { // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), readSessionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}' ), @@ -582,6 +585,29 @@ export class BigQueryReadClient { // -- Path templates -- // -------------------- + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + /** * Return a fully-qualified readSession resource name string. 
* diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 0bad12e8eab..2e99c9d35ae 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -153,6 +153,9 @@ export class BigQueryStorageClient { // identifiers to uniquely identify resources within the API. // Create useful helper objects for these. this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), readSessionPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}' ), @@ -801,6 +804,29 @@ export class BigQueryStorageClient { // -- Path templates -- // -------------------- + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + /** * Return a fully-qualified readSession resource name string. 
* diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index b9498181ba3..e490e21ec1d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,23 +3,15 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "2c4f58676e3fdb4bbf1d565d087e44587a854e7f" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "eb37e688331443969eed9b969531751154a956d5", - "internalRef": "317130948" + "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", + "sha": "6dcda30d8df32fff0599df7f3f185813fa827da6" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "dc9caca650c77b7039e2bbc3339ffb34ae78e5b7" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ], @@ -27,19 +19,19 @@ { "client": { "source": "googleapis", - "apiName": "bigquerystorage", + "apiName": "bigquery-storage", "apiVersion": "v1beta1", - "language": "typescript", - "generator": "gapic-generator-typescript" + "language": "nodejs", + "generator": "bazel" } }, { "client": { "source": "googleapis", - "apiName": "bigquerystorage", + "apiName": "bigquery-storage", "apiVersion": "v1", - "language": "typescript", - "generator": "gapic-generator-typescript" + "language": "nodejs", + "generator": "bazel" } } ] diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/synth.py index 01ba810400a..1cb8ff00d59 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/synth.py @@ -21,19 +21,11 @@ AUTOSYNTH_MULTIPLE_COMMITS = True # Run the gapic generator -gapic = gcp.GAPICMicrogenerator() -name = 'bigquerystorage' +gapic = gcp.GAPICBazel() +name = 'bigquery-storage' versions = ['v1beta1', 'v1'] for version in versions: - library = gapic.typescript_library( - name, - version, - 
proto_path=f'google/cloud/bigquery/storage/{version}', - generator_args={ - 'grpc-service-config': f'google/cloud/bigquery/storage/{version}/{name}_grpc_service_config.json', - 'package-name': f'@google-cloud/bigquery-storage', - }, - ) + library = gapic.node_library(name, version, proto_path=f'google/cloud/bigquery/storage/{version}') s.copy(library, excludes=['package.json', 'README.md', 'src/index.ts']) # Copy common templates common_templates = gcp.CommonTemplates() diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index bac7067ae3c..fb204c5c622 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -474,6 +474,44 @@ describe('v1.BigQueryReadClient', () => { }); describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + describe('readSession', () => { const fakePath = 
'/rendered/path/readSession'; const expectedParameters = { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index e122cb23bd2..117757e569b 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -727,6 +727,44 @@ describe('v1beta1.BigQueryStorageClient', () => { }); describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + describe('readSession', () => { const fakePath = '/rendered/path/readSession'; const expectedParameters = { From 147f1af2a280007a4687f35984de979e691c7b5d Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 6 Jul 2020 16:42:04 -0700 Subject: [PATCH 054/333] chore: update CODEOWNERS (#82) Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> --- 
handwritten/bigquery-storage/.github/CODEOWNERS | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 handwritten/bigquery-storage/.github/CODEOWNERS diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS new file mode 100644 index 00000000000..892293aecca --- /dev/null +++ b/handwritten/bigquery-storage/.github/CODEOWNERS @@ -0,0 +1,9 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The yoshi-nodejs team is the default owner for nodejs repositories. +* @googleapis/yoshi-nodejs @googleapis/api-bigquery From a853fca6e5eaeb02cd56955909255da489ce6410 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 9 Jul 2020 05:12:09 -0700 Subject: [PATCH 055/333] fix: typeo in nodejs .gitattribute (#84) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/cc99acfa-05b8-434b-9500-2f6faf2eaa02/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/799d8e6522c1ef7cb55a70d9ea0b15e045c3d00b --- handwritten/bigquery-storage/.gitattributes | 2 +- handwritten/bigquery-storage/synth.metadata | 14 +++++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/.gitattributes b/handwritten/bigquery-storage/.gitattributes index 2e63216ae9c..d4f4169b28b 100644 --- a/handwritten/bigquery-storage/.gitattributes +++ b/handwritten/bigquery-storage/.gitattributes @@ -1,3 +1,3 @@ *.ts text eol=lf -*.js test eol=lf +*.js text eol=lf protos/* linguist-generated diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index e490e21ec1d..5956712853d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,15 +3,23 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", - "sha": "6dcda30d8df32fff0599df7f3f185813fa827da6" + "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", + "sha": "3db6706e8dfbc8b9ac24c0bc41d21207b7b0820d" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "4f4aa3a03e470f1390758b9d89eb1aa88837a5be", + "internalRef": "320300472" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "799d8e6522c1ef7cb55a70d9ea0b15e045c3d00b" } } ], From 55ef435829d70e3aecc869a45a17b3ad78e2504b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 10 Jul 2020 01:13:36 +0000 Subject: [PATCH 056/333] chore: release 2.2.1 (#85) :robot: I have created a release \*beep\* \*boop\* --- ### [2.2.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.0...v2.2.1) (2020-07-09) ### Bug Fixes * typeo in nodejs .gitattribute 
([#84](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/84)) ([ab36886](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ab36886171cc6d94f66587f715d23e8cd4603f32)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index c37dd9bf6c8..99a9dffbcaf 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.2.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.0...v2.2.1) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#84](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/84)) ([ab36886](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ab36886171cc6d94f66587f715d23e8cd4603f32)) + ## [2.2.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.1.0...v2.2.0) (2020-06-19) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index aa9ebc44f91..b5b25e9b05a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.2.0", + "version": "2.2.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 95a5e92520bb3d118e81f55dbc37cd1baeccf5a8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 10 Jul 2020 21:38:32 +0200 Subject: [PATCH 057/333] chore(deps): update dependency ts-loader to v8 (#83) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | 
[ts-loader](https://togithub.com/TypeStrong/ts-loader) | devDependencies | major | [`^7.0.0` -> `^8.0.0`](https://renovatebot.com/diffs/npm/ts-loader/7.0.5/8.0.0) | --- ### Release Notes
TypeStrong/ts-loader ### [`v8.0.0`](https://togithub.com/TypeStrong/ts-loader/blob/master/CHANGELOG.md#v800) [Compare Source](https://togithub.com/TypeStrong/ts-loader/compare/v7.0.5...v8.0.0) - [Support for symlinks in project references](https://togithub.com/TypeStrong/ts-loader/pull/1136) - thanks [@​sheetalkamat](https://togithub.com/sheetalkamat)! - `ts-loader` now supports TypeScript 3.6 and greater **BREAKING CHANGE**
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index b5b25e9b05a..3b545834dcc 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -43,7 +43,7 @@ "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^9.0.1", - "ts-loader": "^7.0.0", + "ts-loader": "^8.0.0", "typescript": "^3.8.3", "webpack": "^4.41.6", "webpack-cli": "^3.3.11" From a10f827960e94c8cab1ee56f9c8084e1fe75ad75 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 12 Jul 2020 18:49:39 +0200 Subject: [PATCH 058/333] chore(deps): update dependency @types/mocha to v8 (#86) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [@types/mocha](https://togithub.com/DefinitelyTyped/DefinitelyTyped) | devDependencies | major | [`^7.0.1` -> `^8.0.0`](https://renovatebot.com/diffs/npm/@types%2fmocha/7.0.2/8.0.0) | --- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 
:no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 3b545834dcc..2473080efa7 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -30,7 +30,7 @@ "google-gax": "^2.1.0" }, "devDependencies": { - "@types/mocha": "^7.0.1", + "@types/mocha": "^8.0.0", "@types/node": "^13.7.1", "@types/sinon": "^9.0.0", "c8": "^7.1.0", From 0cf275dac0d88370470b7be393cba655eab7d299 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 14 Jul 2020 05:44:12 -0700 Subject: [PATCH 059/333] build: missing closing paren in publish script (#88) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/9c6207e5-a7a6-4e44-ab6b-91751e0230b1/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/d82deccf657a66e31bd5da9efdb96c6fa322fc7e --- handwritten/bigquery-storage/.kokoro/publish.sh | 2 +- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index 24957d71def..f056d861729 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -24,7 +24,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / cd $(dirname $0)/.. 
-NPM_TOKEN=$(cat $KOKORO_GFILE_DIR/secret_manager/npm_publish_token +NPM_TOKEN=$(cat $KOKORO_GFILE_DIR/secret_manager/npm_publish_token) echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc npm install diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 5956712853d..7abf2ba253f 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "3db6706e8dfbc8b9ac24c0bc41d21207b7b0820d" + "sha": "c5e781c5290cd898279272c63169d6c932478ae8" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "799d8e6522c1ef7cb55a70d9ea0b15e045c3d00b" + "sha": "d82deccf657a66e31bd5da9efdb96c6fa322fc7e" } } ], From fb07eff01e9111137399ca564eb806833f8d221f Mon Sep 17 00:00:00 2001 From: "F. Hinkelmann" Date: Tue, 14 Jul 2020 14:44:28 -0400 Subject: [PATCH 060/333] chore: delete Node 8 presubmit tests (#87) --- .../.kokoro/presubmit/node8/common.cfg | 24 ------------------- .../.kokoro/presubmit/node8/test.cfg | 0 2 files changed, 24 deletions(-) delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg deleted file mode 100644 index d9c4fb600d5..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 From 344fb576fc0cb5b774d1843ff431a87fe021e807 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 17 Jul 2020 15:10:37 -0700 Subject: [PATCH 061/333] chore: add config files for cloud-rad for node.js, delete Node 8 templates (#92) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5e903fff-57bb-4395-bb94-8b4d1909dbf6/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/21f1470ecd01424dc91c70f1a7c798e4e87d1eec Source-Link: https://github.com/googleapis/synthtool/commit/388e10f5ae302d3e8de1fac99f3a95d1ab8f824a --- .../.kokoro/release/docs-devsite.cfg | 26 ++ .../.kokoro/release/docs-devsite.sh | 62 +++ .../bigquery-storage/api-extractor.json | 369 ++++++++++++++++++ handwritten/bigquery-storage/synth.metadata | 4 +- 4 files changed, 459 insertions(+), 2 deletions(-) create mode 100644 handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh create mode 100644 handwritten/bigquery-storage/api-extractor.json diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg new file mode 100644 index 00000000000..77a501f8f20 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "nodejs-scheduler/.kokoro/trampoline.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-scheduler/.kokoro/release/docs-devsite.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh new file mode 100755 index 00000000000..b679c48c044 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# build jsdocs (Python is installed on the Node 10 docker image). +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=/home/node/.npm-global + export PATH="$PATH:/home/node/.npm-global/bin" + cd $(dirname $0)/../.. +fi + +mkdir ./etc + +npm install +npm run api-extractor +npm run api-documenter + +npm i json@9.0.6 -g +NAME=$(cat .repo-metadata.json | json name) + +mkdir ./_devsite +cp ./yaml/$NAME/* ./_devsite +cp ./yaml/toc.yml ./_devsite/_toc.yaml + +# create docs.metadata, based on package.json and .repo-metadata.json. 
+pip install -U pip +python3 -m pip install --user gcp-docuploader +python3 -m docuploader create-metadata \ + --name=$NAME \ + --version=$(cat package.json | json version) \ + --language=$(cat .repo-metadata.json | json language) \ + --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ + --product-page=$(cat .repo-metadata.json | json product_documentation) \ + --github-repository=$(cat .repo-metadata.json | json repo) \ + --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) +cp docs.metadata ./_devsite/docs.metadata + +# deploy the docs. +if [[ -z "$CREDENTIALS" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi +if [[ -z "$BUCKET" ]]; then + BUCKET=docs-staging-v2-staging +fi + +python3 -m docuploader upload ./_devsite --destination-prefix docfx --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/handwritten/bigquery-storage/api-extractor.json b/handwritten/bigquery-storage/api-extractor.json new file mode 100644 index 00000000000..de228294b23 --- /dev/null +++ b/handwritten/bigquery-storage/api-extractor.json @@ -0,0 +1,369 @@ +/** + * Config file for API Extractor. For more info, please visit: https://api-extractor.com + */ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + + /** + * Optionally specifies another JSON config file that this file extends from. This provides a way for + * standard settings to be shared across multiple projects. + * + * If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains + * the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be + * resolved using NodeJS require(). 
+ * + * SUPPORTED TOKENS: none + * DEFAULT VALUE: "" + */ + // "extends": "./shared/api-extractor-base.json" + // "extends": "my-package/include/api-extractor-base.json" + + /** + * Determines the "" token that can be used with other config file settings. The project folder + * typically contains the tsconfig.json and package.json config files, but the path is user-defined. + * + * The path is resolved relative to the folder of the config file that contains the setting. + * + * The default value for "projectFolder" is the token "", which means the folder is determined by traversing + * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder + * that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error + * will be reported. + * + * SUPPORTED TOKENS: + * DEFAULT VALUE: "" + */ + // "projectFolder": "..", + + /** + * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor + * analyzes the symbols exported by this module. + * + * The file extension must be ".d.ts" and not ".ts". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + */ + "mainEntryPointFilePath": "/protos/protos.d.ts", + + /** + * A list of NPM package names whose exports should be treated as part of this package. + * + * For example, suppose that Webpack is used to generate a distributed bundle for the project "library1", + * and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part + * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly + * imports library2. 
To avoid this, we can specify: + * + * "bundledPackages": [ "library2" ], + * + * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been + * local files for library1. + */ + "bundledPackages": [ ], + + /** + * Determines how the TypeScript compiler engine will be invoked by API Extractor. + */ + "compiler": { + /** + * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * Note: This setting will be ignored if "overrideTsconfig" is used. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/tsconfig.json" + */ + // "tsconfigFilePath": "/tsconfig.json", + + /** + * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk. + * The object must conform to the TypeScript tsconfig schema: + * + * http://json.schemastore.org/tsconfig + * + * If omitted, then the tsconfig.json file will be read from the "projectFolder". + * + * DEFAULT VALUE: no overrideTsconfig section + */ + // "overrideTsconfig": { + // . . . + // } + + /** + * This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended + * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when + * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses + * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck. + * + * DEFAULT VALUE: false + */ + // "skipLibCheck": true, + }, + + /** + * Configures how the API report file (*.api.md) will be generated. + */ + "apiReport": { + /** + * (REQUIRED) Whether to generate an API report. + */ + "enabled": true, + + /** + * The filename for the API report files. 
It will be combined with "reportFolder" or "reportTempFolder" to produce + * a full file path. + * + * The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/". + * + * SUPPORTED TOKENS: , + * DEFAULT VALUE: ".api.md" + */ + // "reportFileName": ".api.md", + + /** + * Specifies the folder where the API report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy, + * e.g. for an API review. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/etc/" + */ + // "reportFolder": "/etc/", + + /** + * Specifies the folder where the temporary report file is written. The file name portion is determined by + * the "reportFileName" setting. + * + * After the temporary file is written to disk, it is compared with the file in the "reportFolder". + * If they are different, a production build will fail. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/" + */ + // "reportTempFolder": "/temp/" + }, + + /** + * Configures how the doc model file (*.api.json) will be generated. + */ + "docModel": { + /** + * (REQUIRED) Whether to generate a doc model file. + */ + "enabled": true, + + /** + * The output path for the doc model file. The file extension should be ".api.json". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". 
+ * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/temp/.api.json" + */ + // "apiJsonFilePath": "/temp/.api.json" + }, + + /** + * Configures how the .d.ts rollup file will be generated. + */ + "dtsRollup": { + /** + * (REQUIRED) Whether to generate the .d.ts rollup file. + */ + "enabled": true, + + /** + * Specifies the output path for a .d.ts rollup file to be generated without any trimming. + * This file will include all declarations that are exported by the main entry point. + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "/dist/.d.ts" + */ + // "untrimmedFilePath": "/dist/.d.ts", + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release. + * This file will include only declarations that are marked as "@public" or "@beta". + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "betaTrimmedFilePath": "/dist/-beta.d.ts", + + + /** + * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release. + * This file will include only declarations that are marked as "@public". + * + * If the path is an empty string, then this file will not be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "publicTrimmedFilePath": "/dist/-public.d.ts", + + /** + * When a declaration is trimmed, by default it will be replaced by a code comment such as + * "Excluded from this release type: exampleMember". 
Set "omitTrimmingComments" to true to remove the + * declaration completely. + * + * DEFAULT VALUE: false + */ + // "omitTrimmingComments": true + }, + + /** + * Configures how the tsdoc-metadata.json file will be generated. + */ + "tsdocMetadata": { + /** + * Whether to generate the tsdoc-metadata.json file. + * + * DEFAULT VALUE: true + */ + // "enabled": true, + + /** + * Specifies where the TSDoc metadata file should be written. + * + * The path is resolved relative to the folder of the config file that contains the setting; to change this, + * prepend a folder token such as "". + * + * The default value is "", which causes the path to be automatically inferred from the "tsdocMetadata", + * "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup + * falls back to "tsdoc-metadata.json" in the package folder. + * + * SUPPORTED TOKENS: , , + * DEFAULT VALUE: "" + */ + // "tsdocMetadataFilePath": "/dist/tsdoc-metadata.json" + }, + + /** + * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files + * will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead. + * To use the OS's default newline kind, specify "os". + * + * DEFAULT VALUE: "crlf" + */ + // "newlineKind": "crlf", + + /** + * Configures how API Extractor reports error and warning messages produced during analysis. + * + * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages. + */ + "messages": { + /** + * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing + * the input .d.ts files. + * + * TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. 
+ */ + "compilerMessageReporting": { + /** + * Configures the default routing for messages that don't match an explicit rule in this table. + */ + "default": { + /** + * Specifies whether the message should be written to the the tool's output log. Note that + * the "addToApiReportFile" property may supersede this option. + * + * Possible values: "error", "warning", "none" + * + * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail + * and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes + * the "--local" option), the warning is displayed but the build will not fail. + * + * DEFAULT VALUE: "warning" + */ + "logLevel": "warning", + + /** + * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md), + * then the message will be written inside that file; otherwise, the message is instead logged according to + * the "logLevel" option. + * + * DEFAULT VALUE: false + */ + // "addToApiReportFile": false + }, + + // "TS2551": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by API Extractor during its analysis. + * + * API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag" + * + * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings + */ + "extractorMessageReporting": { + "default": { + "logLevel": "warning", + // "addToApiReportFile": false + }, + + // "ae-extra-release-tag": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + }, + + /** + * Configures handling of messages reported by the TSDoc parser when analyzing code comments. + * + * TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text" + * + * DEFAULT VALUE: A single "default" entry with logLevel=warning. 
+ */ + "tsdocMessageReporting": { + "default": { + "logLevel": "warning", + // "addToApiReportFile": false + } + + // "tsdoc-link-tag-unescaped-text": { + // "logLevel": "warning", + // "addToApiReportFile": true + // }, + // + // . . . + } + } + +} diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7abf2ba253f..371fd4f1397 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "c5e781c5290cd898279272c63169d6c932478ae8" + "sha": "02eff9cbb2c9961bd528ba19b27c1406cdae0713" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d82deccf657a66e31bd5da9efdb96c6fa322fc7e" + "sha": "21f1470ecd01424dc91c70f1a7c798e4e87d1eec" } } ], From 3d3b778887b1d81b7ac439257ae8a03285675ec8 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 17 Jul 2020 15:24:23 -0700 Subject: [PATCH 062/333] build: add Node 8 tests (#91) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5e903fff-57bb-4395-bb94-8b4d1909dbf6/targets - [ ] To automatically regenerate this PR, check this box. 
--- .../.kokoro/presubmit/node8/common.cfg | 24 +++++++++++++++++++ .../.kokoro/presubmit/node8/test.cfg | 0 2 files changed, 24 insertions(+) create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg new file mode 100644 index 00000000000..d9c4fb600d5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg new file mode 100644 index 00000000000..e69de29bb2d From 8b3f838da508cd7108436d86d0e0e5b99aecaf0a Mon Sep 17 00:00:00 2001 From: "F. 
Hinkelmann" Date: Tue, 21 Jul 2020 14:46:48 -0400 Subject: [PATCH 063/333] chore: add dev dependencies for cloud-rad ref docs (#93) --- handwritten/bigquery-storage/package.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2473080efa7..2f2eb506744 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -24,7 +24,9 @@ "system-test": "c8 mocha build/system-test", "test": "c8 mocha build/test", "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", - "precompile": "gts clean" + "precompile": "gts clean", + "api-extractor": "api-extractor run --local", + "api-documenter": "api-documenter yaml --input-folder=temp" }, "dependencies": { "google-gax": "^2.1.0" @@ -46,7 +48,9 @@ "ts-loader": "^8.0.0", "typescript": "^3.8.3", "webpack": "^4.41.6", - "webpack-cli": "^3.3.11" + "webpack-cli": "^3.3.11", + "@microsoft/api-documenter": "^7.8.10", + "@microsoft/api-extractor": "^7.8.10" }, "engines": { "node": ">=10" From e7bd237a7a66748f594077dcad38f907d37f01fb Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 21 Jul 2020 22:20:14 -0700 Subject: [PATCH 064/333] build: rename _toc to toc (#94) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/940354f9-15cd-4361-bbf4-dc9af1426979/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/99c93fe09f8c1dca09dfc0301c8668e3a70dd796 --- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 2 +- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index b679c48c044..3b93137d4db 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -36,7 +36,7 @@ NAME=$(cat .repo-metadata.json | json name) mkdir ./_devsite cp ./yaml/$NAME/* ./_devsite -cp ./yaml/toc.yml ./_devsite/_toc.yaml +cp ./yaml/toc.yml ./_devsite/toc.yml # create docs.metadata, based on package.json and .repo-metadata.json. pip install -U pip diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 371fd4f1397..08c30e4278d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "02eff9cbb2c9961bd528ba19b27c1406cdae0713" + "sha": "4b083b1431952cc0b62dd9bbebdbccc3832cc244" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "21f1470ecd01424dc91c70f1a7c798e4e87d1eec" + "sha": "99c93fe09f8c1dca09dfc0301c8668e3a70dd796" } } ], From d162031993012d96cf82f8ba65cbe7daf429d396 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 23 Jul 2020 14:59:17 -0700 Subject: [PATCH 065/333] build: move gitattributes files to node templates (#95) Source-Author: F. 
Hinkelmann Source-Date: Thu Jul 23 01:45:04 2020 -0400 Source-Repo: googleapis/synthtool Source-Sha: 3a00b7fea8c4c83eaff8eb207f530a2e3e8e1de3 Source-Link: https://github.com/googleapis/synthtool/commit/3a00b7fea8c4c83eaff8eb207f530a2e3e8e1de3 --- handwritten/bigquery-storage/.gitattributes | 1 + handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.gitattributes b/handwritten/bigquery-storage/.gitattributes index d4f4169b28b..33739cb74e4 100644 --- a/handwritten/bigquery-storage/.gitattributes +++ b/handwritten/bigquery-storage/.gitattributes @@ -1,3 +1,4 @@ *.ts text eol=lf *.js text eol=lf protos/* linguist-generated +**/api-extractor.json linguist-language=JSON-with-Comments diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 08c30e4278d..20b4b3e96b8 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "4b083b1431952cc0b62dd9bbebdbccc3832cc244" + "sha": "727484c227636b675083ec236a8f3aebfefe6324" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "99c93fe09f8c1dca09dfc0301c8668e3a70dd796" + "sha": "3a00b7fea8c4c83eaff8eb207f530a2e3e8e1de3" } } ], From 49d2add6bd53e03f1e8786c62c4c02c3acb28a87 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 29 Jul 2020 17:07:33 -0700 Subject: [PATCH 066/333] chore(node): fix kokoro build path for cloud-rad (#96) Source-Author: F. 
Hinkelmann Source-Date: Wed Jul 29 00:28:42 2020 -0400 Source-Repo: googleapis/synthtool Source-Sha: 89d431fb2975fc4e0ed24995a6e6dfc8ff4c24fa Source-Link: https://github.com/googleapis/synthtool/commit/89d431fb2975fc4e0ed24995a6e6dfc8ff4c24fa --- handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg | 4 ++-- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg index 77a501f8f20..7c4d2937c97 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -18,9 +18,9 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-scheduler/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-scheduler/.kokoro/release/docs-devsite.sh" + value: "github/nodejs-bigquery-storage/.kokoro/release/docs-devsite.sh" } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 20b4b3e96b8..0ddb321c9ac 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "727484c227636b675083ec236a8f3aebfefe6324" + "sha": "4e992afd67dc4c34817e73884a63b417638452d8" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "3a00b7fea8c4c83eaff8eb207f530a2e3e8e1de3" + "sha": "89d431fb2975fc4e0ed24995a6e6dfc8ff4c24fa" } } ], From cd02d53b3acdb3e62f6ee991484e4cdb975e2988 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 30 Jul 2020 19:46:29 -0700 Subject: 
[PATCH 067/333] build: update protos --- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- handwritten/bigquery-storage/synth.metadata | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 7d93ddb63d4..f94b251defa 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -13,7 +13,7 @@ // limitations under the License. import * as Long from "long"; -import * as $protobuf from "protobufjs"; +import {protobuf as $protobuf} from "google-gax"; /** Namespace google. */ export namespace google { diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index faf2f6c3d8b..0927a833d1a 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -19,7 +19,7 @@ define(["protobufjs/minimal"], factory); /* CommonJS */ else if (typeof require === 'function' && typeof module === 'object' && module && module.exports) - module.exports = factory(require("protobufjs/minimal")); + module.exports = factory(require("google-gax").protobufMinimal); })(this, function($protobuf) { "use strict"; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 0ddb321c9ac..d8044e2f5bc 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "4e992afd67dc4c34817e73884a63b417638452d8" + "sha": "0ba802efd080e76ba2b0b6e268ddfbc60168a08d" } }, { From 172d098cbf28b9ce8c069743b92a30c8dca2769a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 31 Jul 2020 09:24:31 -0700 Subject: [PATCH 068/333] docs: add links to the CHANGELOG from the README.md 
for Java and Node (#98) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/7b446397-88f3-4463-9e7d-d2ce7069989d/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/5936421202fb53ed4641bcb824017dd393a3dbcc --- handwritten/bigquery-storage/README.md | 3 +++ handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index bae285ff153..251a371a879 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -14,6 +14,9 @@ Client for the BigQuery Storage API +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/CHANGELOG.md). + * [Google BigQuery Storage Node.js Client API Reference][client-docs] * [Google BigQuery Storage Documentation][product-docs] * [github.com/googleapis/nodejs-bigquery-storage](https://github.com/googleapis/nodejs-bigquery-storage) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index d8044e2f5bc..d22d57b5a34 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "0ba802efd080e76ba2b0b6e268ddfbc60168a08d" + "sha": "137e4fc42b754a059c8acda04d0f2692e9e2fd77" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "89d431fb2975fc4e0ed24995a6e6dfc8ff4c24fa" + "sha": "5936421202fb53ed4641bcb824017dd393a3dbcc" } } ], From ae800ce2868bcada1a29dfb314242f5c8c2c6b5c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sat, 8 Aug 2020 11:36:33 -0700 
Subject: [PATCH 069/333] build: --credential-file-override is no longer required (#101) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/4de22315-84b1-493d-8da2-dfa7688128f5/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/94421c47802f56a44c320257b2b4c190dc7d6b68 --- handwritten/bigquery-storage/.kokoro/populate-secrets.sh | 1 - handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh index e6ce8200d75..6f9d228859d 100755 --- a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh +++ b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh @@ -32,7 +32,6 @@ do --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ gcr.io/google.com/cloudsdktool/cloud-sdk \ secrets versions access latest \ - --credential-file-override=${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json \ --project cloud-devrel-kokoro-resources \ --secret $key > \ "$SECRET_LOCATION/$key" diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index d22d57b5a34..0460272fc3a 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "137e4fc42b754a059c8acda04d0f2692e9e2fd77" + "sha": "a3f7556dc9842d0f784d30afea60c814ab208a5a" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5936421202fb53ed4641bcb824017dd393a3dbcc" + "sha": "94421c47802f56a44c320257b2b4c190dc7d6b68" } } ], From 1e8814c758192d97cfb3b742427615825a0541c6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 
11 Aug 2020 11:24:03 -0700 Subject: [PATCH 070/333] build: use gapic-generator-typescript v1.0.7. (#102) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5f7f9c6d-c75a-4c60-8bb8-0026a14cead7/targets - [ ] To automatically regenerate this PR, check this box. PiperOrigin-RevId: 325949033 Source-Link: https://github.com/googleapis/googleapis/commit/94006b3cb8d2fb44703cf535da15608eed6bf7db --- .../bigquery-storage/src/v1/big_query_read_client.ts | 5 ++--- .../src/v1beta1/big_query_storage_client.ts | 5 ++--- handwritten/bigquery-storage/synth.metadata | 6 +++--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 7d054605a41..dba8fffa0d0 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -236,12 +236,11 @@ export class BigQueryReadClient { } ); + const descriptor = this.descriptors.stream[methodName] || undefined; const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - this.descriptors.page[methodName] || - this.descriptors.stream[methodName] || - this.descriptors.longrunning[methodName] + descriptor ); this.innerApiCalls[methodName] = apiCall; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 2e99c9d35ae..8964e40f5c5 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -239,12 +239,11 @@ export class BigQueryStorageClient { } ); + const descriptor = this.descriptors.stream[methodName] || undefined; const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - 
this.descriptors.page[methodName] || - this.descriptors.stream[methodName] || - this.descriptors.longrunning[methodName] + descriptor ); this.innerApiCalls[methodName] = apiCall; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 0460272fc3a..238c96af842 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "a3f7556dc9842d0f784d30afea60c814ab208a5a" + "sha": "4230f845b9365257d6c2686aa4b66e506de2d440" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "4f4aa3a03e470f1390758b9d89eb1aa88837a5be", - "internalRef": "320300472" + "sha": "94006b3cb8d2fb44703cf535da15608eed6bf7db", + "internalRef": "325949033" } }, { From a44290d744504c21d1acbed1096135f366b2fdf5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 12 Aug 2020 09:36:13 -0700 Subject: [PATCH 071/333] chore: update cloud rad kokoro build job (#103) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/b742586e-df31-4aac-8092-78288e9ea8e7/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/bd0deaa1113b588d70449535ab9cbf0f2bd0e72f --- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 5 +++++ handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 3b93137d4db..fa089cf290e 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -36,6 +36,11 @@ NAME=$(cat .repo-metadata.json | json name) mkdir ./_devsite cp ./yaml/$NAME/* ./_devsite + +# Delete SharePoint item, see https://github.com/microsoft/rushstack/issues/1229 +sed -i -e '1,3d' ./yaml/toc.yml +sed -i -e 's/^ //' ./yaml/toc.yml + cp ./yaml/toc.yml ./_devsite/toc.yml # create docs.metadata, based on package.json and .repo-metadata.json. diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 238c96af842..1c7559cc906 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "4230f845b9365257d6c2686aa4b66e506de2d440" + "sha": "08746ef0bde0c826aa33b5393cae4361c6b96986" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "94421c47802f56a44c320257b2b4c190dc7d6b68" + "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" } } ], From 38381beccbd5a44310385410cf3c07a9d7f29967 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 13 Aug 2020 09:14:21 -0700 Subject: [PATCH 072/333] build: perform publish using Node 12 (#104) This PR was generated using Autosynth. 
:rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/c36c6dbc-ab79-4f17-b70b-523b420b2a70/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/5747555f7620113d9a2078a48f4c047a99d31b3e --- handwritten/bigquery-storage/.kokoro/release/publish.cfg | 2 +- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 948aefb0856..84afcf020e7 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -61,7 +61,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } env_vars: { diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 1c7559cc906..7cc9313715d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "08746ef0bde0c826aa33b5393cae4361c6b96986" + "sha": "12e89d855761c4a84db00ed49b847d68069e0754" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" + "sha": "5747555f7620113d9a2078a48f4c047a99d31b3e" } } ], From 7e32780bfdce7192e2c6bb920f6e55f07b687d48 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 19 Aug 2020 22:08:25 -0700 Subject: [PATCH 073/333] chore: start tracking obsolete files --- handwritten/bigquery-storage/synth.metadata | 92 
++++++++++++++++++++- 1 file changed, 88 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7cc9313715d..8013910f8f9 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,22 +4,22 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "12e89d855761c4a84db00ed49b847d68069e0754" + "sha": "ad96022ed862e005d67e8f10bf7e160f362e79e1" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "94006b3cb8d2fb44703cf535da15608eed6bf7db", - "internalRef": "325949033" + "sha": "4c5071b615d96ef9dfd6a63d8429090f1f2872bb", + "internalRef": "327369997" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5747555f7620113d9a2078a48f4c047a99d31b3e" + "sha": "1a60ff2a3975c2f5054431588bd95db9c3b862ba" } } ], @@ -42,5 +42,89 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".eslintignore", + ".eslintrc.json", + ".gitattributes", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/publish.yml", + ".github/release-please.yml", + ".github/workflows/ci.yaml", + ".gitignore", + ".jsdoc.js", + ".kokoro/.gitattributes", + ".kokoro/common.cfg", + ".kokoro/continuous/node10/common.cfg", + ".kokoro/continuous/node10/docs.cfg", + ".kokoro/continuous/node10/lint.cfg", + ".kokoro/continuous/node10/samples-test.cfg", + ".kokoro/continuous/node10/system-test.cfg", + ".kokoro/continuous/node10/test.cfg", + ".kokoro/continuous/node12/common.cfg", + ".kokoro/continuous/node12/test.cfg", + ".kokoro/docs.sh", + ".kokoro/lint.sh", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/node10/common.cfg", + ".kokoro/presubmit/node10/samples-test.cfg", + 
".kokoro/presubmit/node10/system-test.cfg", + ".kokoro/presubmit/node12/common.cfg", + ".kokoro/presubmit/node12/test.cfg", + ".kokoro/publish.sh", + ".kokoro/release/docs-devsite.cfg", + ".kokoro/release/docs-devsite.sh", + ".kokoro/release/docs.cfg", + ".kokoro/release/docs.sh", + ".kokoro/release/publish.cfg", + ".kokoro/samples-test.sh", + ".kokoro/system-test.sh", + ".kokoro/test.bat", + ".kokoro/test.sh", + ".kokoro/trampoline.sh", + ".mocharc.js", + ".nycrc", + ".prettierignore", + ".prettierrc.js", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.md", + "LICENSE", + "README.md", + "api-extractor.json", + "linkinator.config.json", + "package-lock.json.2385627015", + "protos/google/cloud/bigquery/storage/v1/arrow.proto", + "protos/google/cloud/bigquery/storage/v1/avro.proto", + "protos/google/cloud/bigquery/storage/v1/storage.proto", + "protos/google/cloud/bigquery/storage/v1/stream.proto", + "protos/google/cloud/bigquery/storage/v1beta1/arrow.proto", + "protos/google/cloud/bigquery/storage/v1beta1/avro.proto", + "protos/google/cloud/bigquery/storage/v1beta1/read_options.proto", + "protos/google/cloud/bigquery/storage/v1beta1/storage.proto", + "protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto", + "protos/protos.d.ts", + "protos/protos.js", + "protos/protos.json", + "renovate.json", + "samples/README.md", + "samples/package-lock.json.1762785766", + "src/v1/big_query_read_client.ts", + "src/v1/big_query_read_client_config.json", + "src/v1/big_query_read_proto_list.json", + "src/v1/index.ts", + "src/v1beta1/big_query_storage_client.ts", + "src/v1beta1/big_query_storage_client_config.json", + "src/v1beta1/big_query_storage_proto_list.json", + "src/v1beta1/index.ts", + "system-test/fixtures/sample/src/index.js", + "system-test/fixtures/sample/src/index.ts", + "system-test/install.ts", + "test/gapic_big_query_read_v1.ts", + "test/gapic_big_query_storage_v1beta1.ts", + "tsconfig.json", + "webpack.config.js" ] } \ No newline at end of file From 
c5135aeb7bb207ea6acc01d3e4433bd4e5ad0661 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 21 Aug 2020 09:26:36 -0700 Subject: [PATCH 074/333] build: move system and samples test from Node 10 to Node 12 (#106) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/ba2d388f-b3b2-4ad7-a163-0c6b4d86894f/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/05de3e1e14a0b07eab8b474e669164dbd31f81fb --- .../continuous/{node10 => node12}/lint.cfg | 0 .../{node10 => node12}/samples-test.cfg | 0 .../{node10 => node12}/system-test.cfg | 0 .../{node10 => node12}/samples-test.cfg | 0 .../presubmit/{node10 => node12}/system-test.cfg | 0 handwritten/bigquery-storage/synth.metadata | 16 +++++++--------- 6 files changed, 7 insertions(+), 9 deletions(-) rename handwritten/bigquery-storage/.kokoro/continuous/{node10 => node12}/lint.cfg (100%) rename handwritten/bigquery-storage/.kokoro/continuous/{node10 => node12}/samples-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/continuous/{node10 => node12}/system-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node10 => node12}/samples-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node10 => node12}/system-test.cfg (100%) diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node10/lint.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node10/samples-test.cfg rename to 
handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node10/system-test.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/presubmit/node10/samples-test.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/presubmit/node10/system-test.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 8013910f8f9..8c603f39c28 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "ad96022ed862e005d67e8f10bf7e160f362e79e1" + "sha": "059a4caaecff40122f69107ab9ebe91a47f29c5c" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "1a60ff2a3975c2f5054431588bd95db9c3b862ba" + "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb" } } ], @@ -60,19 +60,19 @@ ".kokoro/common.cfg", ".kokoro/continuous/node10/common.cfg", ".kokoro/continuous/node10/docs.cfg", - ".kokoro/continuous/node10/lint.cfg", - ".kokoro/continuous/node10/samples-test.cfg", - 
".kokoro/continuous/node10/system-test.cfg", ".kokoro/continuous/node10/test.cfg", ".kokoro/continuous/node12/common.cfg", + ".kokoro/continuous/node12/lint.cfg", + ".kokoro/continuous/node12/samples-test.cfg", + ".kokoro/continuous/node12/system-test.cfg", ".kokoro/continuous/node12/test.cfg", ".kokoro/docs.sh", ".kokoro/lint.sh", ".kokoro/populate-secrets.sh", ".kokoro/presubmit/node10/common.cfg", - ".kokoro/presubmit/node10/samples-test.cfg", - ".kokoro/presubmit/node10/system-test.cfg", ".kokoro/presubmit/node12/common.cfg", + ".kokoro/presubmit/node12/samples-test.cfg", + ".kokoro/presubmit/node12/system-test.cfg", ".kokoro/presubmit/node12/test.cfg", ".kokoro/publish.sh", ".kokoro/release/docs-devsite.cfg", @@ -95,7 +95,6 @@ "README.md", "api-extractor.json", "linkinator.config.json", - "package-lock.json.2385627015", "protos/google/cloud/bigquery/storage/v1/arrow.proto", "protos/google/cloud/bigquery/storage/v1/avro.proto", "protos/google/cloud/bigquery/storage/v1/storage.proto", @@ -110,7 +109,6 @@ "protos/protos.json", "renovate.json", "samples/README.md", - "samples/package-lock.json.1762785766", "src/v1/big_query_read_client.ts", "src/v1/big_query_read_client_config.json", "src/v1/big_query_read_proto_list.json", From e63ca9ca891bb245e112ec43838f13e947f0bed7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 28 Aug 2020 10:02:06 -0700 Subject: [PATCH 075/333] build: track flaky tests for "nightly", add new secrets for tagging (#107) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/a8ab5675-1982-4cf5-b9b7-36794038b975/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/8cf6d2834ad14318e64429c3b94f6443ae83daf9 --- handwritten/bigquery-storage/.github/publish.yml | 0 handwritten/bigquery-storage/.kokoro/release/publish.cfg | 2 +- handwritten/bigquery-storage/.kokoro/samples-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- handwritten/bigquery-storage/synth.metadata | 5 ++--- 6 files changed, 6 insertions(+), 7 deletions(-) delete mode 100644 handwritten/bigquery-storage/.github/publish.yml diff --git a/handwritten/bigquery-storage/.github/publish.yml b/handwritten/bigquery-storage/.github/publish.yml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 84afcf020e7..d7e414444f2 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -49,7 +49,7 @@ before_action { env_vars: { key: "SECRET_MANAGER_KEYS" - value: "npm_publish_token" + value: "npm_publish_token,releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } # Download trampoline resources. diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 86e83c9d3da..c0c40139cb7 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -41,7 +41,7 @@ if [ -f samples/package.json ]; then cd .. 
# If tests are running against master, configure Build Cop # to open issues on failures: - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index dfae142a231..283f1700fef 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -35,7 +35,7 @@ npm install # If tests are running against master, configure Build Cop # to open issues on failures: -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 8d9c2954579..47be59b987c 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -23,7 +23,7 @@ cd $(dirname $0)/.. 
npm install # If tests are running against master, configure Build Cop # to open issues on failures: -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 8c603f39c28..8e87a9a8e7d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "059a4caaecff40122f69107ab9ebe91a47f29c5c" + "sha": "890d24ba66d242e29c4c3960c190c936038bf042" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb" + "sha": "8cf6d2834ad14318e64429c3b94f6443ae83daf9" } } ], @@ -51,7 +51,6 @@ ".github/ISSUE_TEMPLATE/feature_request.md", ".github/ISSUE_TEMPLATE/support_request.md", ".github/PULL_REQUEST_TEMPLATE.md", - ".github/publish.yml", ".github/release-please.yml", ".github/workflows/ci.yaml", ".gitignore", From 4f0d0a1205e3b473ffdee8565ba648675a19d512 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Tue, 1 Sep 2020 12:15:32 -0700 Subject: [PATCH 076/333] build: delete Blunderbuss (#109) --- handwritten/bigquery-storage/.github/blunderbuss.yaml | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 handwritten/bigquery-storage/.github/blunderbuss.yaml diff --git a/handwritten/bigquery-storage/.github/blunderbuss.yaml b/handwritten/bigquery-storage/.github/blunderbuss.yaml deleted file mode 100644 index b5a53f68cc0..00000000000 --- a/handwritten/bigquery-storage/.github/blunderbuss.yaml +++ /dev/null @@ -1,8 +0,0 @@ -assign_issues: - - sofisl - - bcoe 
- - steffnay -assign_prs: - - sofisl - - bcoe - - steffnay From a5a567a0b3a2d0e2c393aff17eb8434e4ae43c99 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sat, 12 Sep 2020 11:20:14 -0700 Subject: [PATCH 077/333] build(test): recursively find test files; fail on unsupported dependency versions (#111) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/4ed21f23-f327-48f6-8054-258670f2c8a0/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/fdd03c161003ab97657cc0218f25c82c89ddf4b6 --- handwritten/bigquery-storage/.mocharc.js | 3 ++- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.mocharc.js b/handwritten/bigquery-storage/.mocharc.js index ff7b34fa5d1..0b600509bed 100644 --- a/handwritten/bigquery-storage/.mocharc.js +++ b/handwritten/bigquery-storage/.mocharc.js @@ -14,7 +14,8 @@ const config = { "enable-source-maps": true, "throw-deprecation": true, - "timeout": 10000 + "timeout": 10000, + "recursive": true } if (process.env.MOCHA_THROW_DEPRECATION === 'false') { delete config['throw-deprecation']; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 8e87a9a8e7d..1c6c1282916 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "890d24ba66d242e29c4c3960c190c936038bf042" + "sha": "58c08c4008a0f028dbe42cb6ec1f585aed5be41e" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "8cf6d2834ad14318e64429c3b94f6443ae83daf9" + "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6" } } ], From 0d63a931897e1bbe8d95888fbdd271a2d1c68e83 
Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 1 Oct 2020 05:06:33 -0700 Subject: [PATCH 078/333] chore: update bucket for cloud-rad (#112) Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> Source-Author: F. Hinkelmann Source-Date: Wed Sep 30 14:13:57 2020 -0400 Source-Repo: googleapis/synthtool Source-Sha: 079dcce498117f9570cebe6e6cff254b38ba3860 Source-Link: https://github.com/googleapis/synthtool/commit/079dcce498117f9570cebe6e6cff254b38ba3860 --- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 2 +- handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index fa089cf290e..458fe4f9062 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -61,7 +61,7 @@ if [[ -z "$CREDENTIALS" ]]; then CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account fi if [[ -z "$BUCKET" ]]; then - BUCKET=docs-staging-v2-staging + BUCKET=docs-staging-v2 fi python3 -m docuploader upload ./_devsite --destination-prefix docfx --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 1c6c1282916..61a156293d2 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "58c08c4008a0f028dbe42cb6ec1f585aed5be41e" + "sha": "0430db108fc6dd6c00b0458e79b4fee8913ed016" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6" + "sha": 
"079dcce498117f9570cebe6e6cff254b38ba3860" } } ], From aedba95f0abb99d28dc0eb40785980f3fa170c97 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 5 Oct 2020 10:42:35 -0700 Subject: [PATCH 079/333] build(node_library): migrate to Trampoline V2 (#113) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/e2b098f4-76c4-4fe5-820c-ae293231f7bd/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/0c868d49b8e05bc1f299bc773df9eb4ef9ed96e9 --- .../bigquery-storage/.kokoro/common.cfg | 2 +- .../.kokoro/continuous/node10/common.cfg | 2 +- .../.kokoro/continuous/node12/common.cfg | 2 +- handwritten/bigquery-storage/.kokoro/docs.sh | 2 +- handwritten/bigquery-storage/.kokoro/lint.sh | 2 +- .../.kokoro/populate-secrets.sh | 65 ++- .../.kokoro/presubmit/node10/common.cfg | 2 +- .../.kokoro/presubmit/node12/common.cfg | 2 +- .../bigquery-storage/.kokoro/publish.sh | 2 +- .../.kokoro/release/docs-devsite.cfg | 2 +- .../.kokoro/release/docs-devsite.sh | 4 +- .../bigquery-storage/.kokoro/release/docs.cfg | 2 +- .../bigquery-storage/.kokoro/release/docs.sh | 4 +- .../.kokoro/release/publish.cfg | 6 +- .../bigquery-storage/.kokoro/samples-test.sh | 2 +- .../bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- .../bigquery-storage/.kokoro/trampoline.sh | 4 + .../bigquery-storage/.kokoro/trampoline_v2.sh | 488 ++++++++++++++++++ handwritten/bigquery-storage/.trampolinerc | 51 ++ handwritten/bigquery-storage/synth.metadata | 6 +- 21 files changed, 616 insertions(+), 38 deletions(-) create mode 100755 handwritten/bigquery-storage/.kokoro/trampoline_v2.sh create mode 100644 handwritten/bigquery-storage/.trampolinerc diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg index 81699465317..c38e1a2c9b4 100644 --- 
a/handwritten/bigquery-storage/.kokoro/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg index e9656f0edfc..acc3b554c61 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg @@ -21,7 +21,7 @@ before_action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg index 6df937a77d6..7fc0cdeac69 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/docs.sh b/handwritten/bigquery-storage/.kokoro/docs.sh index 952403faede..85901242b5e 100755 --- a/handwritten/bigquery-storage/.kokoro/docs.sh +++ b/handwritten/bigquery-storage/.kokoro/docs.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. diff --git a/handwritten/bigquery-storage/.kokoro/lint.sh b/handwritten/bigquery-storage/.kokoro/lint.sh index b03cb0439a6..aef4866e4c4 100755 --- a/handwritten/bigquery-storage/.kokoro/lint.sh +++ b/handwritten/bigquery-storage/.kokoro/lint.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. diff --git a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh index 6f9d228859d..deb2b199eb4 100755 --- a/handwritten/bigquery-storage/.kokoro/populate-secrets.sh +++ b/handwritten/bigquery-storage/.kokoro/populate-secrets.sh @@ -13,31 +13,64 @@ # See the License for the specific language governing permissions and # limitations under the License. +# This file is called in the early stage of `trampoline_v2.sh` to +# populate secrets needed for the CI builds. + set -eo pipefail function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} function msg { println "$*" >&2 ;} function println { printf '%s\n' "$(now) $*" ;} +# Populates requested secrets set in SECRET_MANAGER_KEYS + +# In Kokoro CI builds, we use the service account attached to the +# Kokoro VM. This means we need to setup auth on other CI systems. +# For local run, we just use the gcloud command for retrieving the +# secrets. 
+ +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + GCLOUD_COMMANDS=( + "docker" + "run" + "--entrypoint=gcloud" + "--volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR}" + "gcr.io/google.com/cloudsdktool/cloud-sdk" + ) + if [[ "${TRAMPOLINE_CI:-}" == "kokoro" ]]; then + SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" + else + echo "Authentication for this CI system is not implemented yet." + exit 2 + # TODO: Determine appropriate SECRET_LOCATION and the GCLOUD_COMMANDS. + fi +else + # For local run, use /dev/shm or temporary directory for + # KOKORO_GFILE_DIR. + if [[ -d "/dev/shm" ]]; then + export KOKORO_GFILE_DIR=/dev/shm + else + export KOKORO_GFILE_DIR=$(mktemp -d -t ci-XXXXXXXX) + fi + SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" + GCLOUD_COMMANDS=("gcloud") +fi -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" mkdir -p ${SECRET_LOCATION} + for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret $key > \ - "$SECRET_LOCATION/$key" - if [[ $? == 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi + msg "Retrieving secret ${key}" + "${GCLOUD_COMMANDS[@]}" \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret $key > \ + "$SECRET_LOCATION/$key" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + exit 2 + fi done diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg index e9656f0edfc..acc3b554c61 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg @@ -21,7 +21,7 @@ before_action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg index 6df937a77d6..7fc0cdeac69 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index f056d861729..4db6bf1c7f5 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Start the releasetool reporter python3 -m pip install gcp-releasetool diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg index 7c4d2937c97..f8aaa1f5eb5 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -18,7 +18,7 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 458fe4f9062..0d11b7ae951 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -20,8 +20,8 @@ set -eo pipefail if [[ -z "$CREDENTIALS" ]]; then # if CREDENTIALS are explicitly set, assume we're testing locally # and don't set NPM_CONFIG_PREFIX. - export NPM_CONFIG_PREFIX=/home/node/.npm-global - export PATH="$PATH:/home/node/.npm-global/bin" + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" cd $(dirname $0)/../.. 
fi diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg index 88eb54b1cd4..21d0eb33836 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -18,7 +18,7 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh index 4d3a0868531..4c866c86000 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs.sh @@ -20,8 +20,8 @@ set -eo pipefail if [[ -z "$CREDENTIALS" ]]; then # if CREDENTIALS are explicitly set, assume we're testing locally # and don't set NPM_CONFIG_PREFIX. - export NPM_CONFIG_PREFIX=/home/node/.npm-global - export PATH="$PATH:/home/node/.npm-global/bin" + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" cd $(dirname $0)/../.. 
fi npm install diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index d7e414444f2..bbfadda164d 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -27,7 +27,7 @@ before_action { } } -# Fetch magictoken to use with Magic Github Proxy +# Fetch magictoken to use with Magic Github Proxy before_action { fetch_keystore { keystore_resource { @@ -37,7 +37,7 @@ before_action { } } -# Fetch api key to use with Magic Github Proxy +# Fetch api key to use with Magic Github Proxy before_action { fetch_keystore { keystore_resource { @@ -56,7 +56,7 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index c0c40139cb7..bab7ba4e967 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 283f1700fef..8a08400484a 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. 
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 47be59b987c..5be385fef64 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -16,7 +16,7 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=/home/node/.npm-global +export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. diff --git a/handwritten/bigquery-storage/.kokoro/trampoline.sh b/handwritten/bigquery-storage/.kokoro/trampoline.sh index a4241db23f4..f693a1ce7aa 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# This file is not used any more, but we keep this file for making it +# easy to roll back. +# TODO: Remove this file from the template. + set -eo pipefail # Always run the cleanup script, regardless of the success of bouncing into diff --git a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh new file mode 100755 index 00000000000..5ae75f977d7 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh @@ -0,0 +1,488 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# trampoline_v2.sh +# +# If you want to make a change to this file, consider doing so at: +# https://github.com/googlecloudplatform/docker-ci-helper +# +# This script is for running CI builds. For Kokoro builds, we +# set this script to `build_file` field in the Kokoro configuration. + +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. +# +# Here is an example for running this script. +# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ +# TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ +# .kokoro/trampoline_v2.sh + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.7" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". 
The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. 
+ RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + 
TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. 
+ "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}" | grep "${TRAMPOLINE_IMAGE%:*}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. 
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. 
+ "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." 
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/handwritten/bigquery-storage/.trampolinerc b/handwritten/bigquery-storage/.trampolinerc new file mode 100644 index 00000000000..164613b9e6a --- /dev/null +++ b/handwritten/bigquery-storage/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( +) + +# Add env vars which are passed down into the container here. 
+pass_down_envvars+=( + "AUTORELEASE_PR" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi + +# Secret Manager secrets. +source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 61a156293d2..36705ddd677 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "0430db108fc6dd6c00b0458e79b4fee8913ed016" + "sha": "590b99c703941451d705cfb62f38176630f4dda5" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "079dcce498117f9570cebe6e6cff254b38ba3860" + "sha": "0c868d49b8e05bc1f299bc773df9eb4ef9ed96e9" } } ], @@ -84,10 +84,12 @@ ".kokoro/test.bat", ".kokoro/test.sh", ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", ".mocharc.js", ".nycrc", ".prettierignore", ".prettierrc.js", + ".trampolinerc", "CODE_OF_CONDUCT.md", "CONTRIBUTING.md", "LICENSE", From 00c1de5233fc80b267737f54113cb2332e5a6600 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 12 Oct 2020 21:54:26 +0200 Subject: [PATCH 080/333] chore(deps): update dependency webpack-cli to v4 (#117) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | 
[webpack-cli](https://togithub.com/webpack/webpack-cli) | devDependencies | major | [`^3.3.11` -> `^4.0.0`](https://renovatebot.com/diffs/npm/webpack-cli/3.3.12/4.0.0) | --- ### Release Notes
webpack/webpack-cli ### [`v4.0.0`](https://togithub.com/webpack/webpack-cli/blob/master/CHANGELOG.md#​400-httpsgithubcomwebpackwebpack-clicomparewebpack-cli400-rc1webpack-cli400-2020-10-10) [Compare Source](https://togithub.com/webpack/webpack-cli/compare/v3.3.12...webpack-cli@4.0.0) ##### Bug Fixes - add compilation lifecycle in watch instance ([#​1903](https://togithub.com/webpack/webpack-cli/issues/1903)) ([02b6d21](https://togithub.com/webpack/webpack-cli/commit/02b6d21eaa20166a7ed37816de716b8fc22b756a)) - cleanup `package-utils` package ([#​1822](https://togithub.com/webpack/webpack-cli/issues/1822)) ([fd5b92b](https://togithub.com/webpack/webpack-cli/commit/fd5b92b3cd40361daec5bf4486e455a41f4c9738)) - cli-executer supplies args further up ([#​1904](https://togithub.com/webpack/webpack-cli/issues/1904)) ([097564a](https://togithub.com/webpack/webpack-cli/commit/097564a851b36b63e0a6bf88144997ef65aa057a)) - exit code for validation errors ([59f6303](https://togithub.com/webpack/webpack-cli/commit/59f63037fcbdbb8934b578b9adf5725bc4ae1235)) - exit process in case of schema errors ([71e89b4](https://togithub.com/webpack/webpack-cli/commit/71e89b4092d953ea587cc4f606451ab78cbcdb93)) ##### Features - assign config paths in build dependencies in cache config ([#​1900](https://togithub.com/webpack/webpack-cli/issues/1900)) ([7e90f11](https://togithub.com/webpack/webpack-cli/commit/7e90f110b119f36ef9def4f66cf4e17ccf1438cd))
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2f2eb506744..ce0956d34e2 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -48,7 +48,7 @@ "ts-loader": "^8.0.0", "typescript": "^3.8.3", "webpack": "^4.41.6", - "webpack-cli": "^3.3.11", + "webpack-cli": "^4.0.0", "@microsoft/api-documenter": "^7.8.10", "@microsoft/api-extractor": "^7.8.10" }, From 19efe5b4e7eaf1cb14de8014f4bbe7262b0e72d8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Oct 2020 23:02:45 +0200 Subject: [PATCH 081/333] chore(deps): update dependency webpack to v5 (#116) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [webpack](https://togithub.com/webpack/webpack) | devDependencies | major | [`^4.41.6` -> `^5.0.0`](https://renovatebot.com/diffs/npm/webpack/4.44.2/5.1.0) | --- ### Release Notes
webpack/webpack ### [`v5.1.0`](https://togithub.com/webpack/webpack/releases/v5.1.0) [Compare Source](https://togithub.com/webpack/webpack/compare/v5.0.0...v5.1.0) ### Features - expose `webpack` property from `Compiler` - expose `cleverMerge`, `EntryOptionPlugin`, `DynamicEntryPlugin` ### Bugfixes - missing `require("..").xxx` in try-catch produces a warning instead of an error now - handle reexports in concatenated modules correctly when they are side-effect-free - fix incorrect deprecation message for ModuleTemplate.hooks.hash ### [`v5.0.0`](https://togithub.com/webpack/webpack/releases/v5.0.0) [Compare Source](https://togithub.com/webpack/webpack/compare/v4.44.2...v5.0.0) [Announcement and changelog](https://webpack.js.org/blog/2020-10-10-webpack-5-release/)
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index ce0956d34e2..d7f80ef108d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -47,7 +47,7 @@ "sinon": "^9.0.1", "ts-loader": "^8.0.0", "typescript": "^3.8.3", - "webpack": "^4.41.6", + "webpack": "^5.0.0", "webpack-cli": "^4.0.0", "@microsoft/api-documenter": "^7.8.10", "@microsoft/api-extractor": "^7.8.10" From c939f96c78ecd673e07edef961412d4dec95a725 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 16 Oct 2020 10:08:47 -0700 Subject: [PATCH 082/333] build: only check --engine-strict for production deps (#119) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/220288a4-7076-443b-9721-9f71deddc661/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/5451633881133e5573cc271a18e73b18caca8b1b --- handwritten/bigquery-storage/synth.metadata | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 36705ddd677..247e555cc14 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "590b99c703941451d705cfb62f38176630f4dda5" + "sha": "cf74b5fe028deb7c3cc9630d74ed89df62d266b0" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0c868d49b8e05bc1f299bc773df9eb4ef9ed96e9" + "sha": "5451633881133e5573cc271a18e73b18caca8b1b" } } ], From a8e83338ed4d0b97a5f79f45b89ac6ff317b1eb8 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 20 Oct 2020 08:38:02 -0700 Subject: [PATCH 083/333] docs: update sample on README to latest (#120) --- handwritten/bigquery-storage/README.md | 76 ++++++++++----------- handwritten/bigquery-storage/synth.metadata | 2 +- 2 files changed, 38 insertions(+), 40 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 251a371a879..3a07845c979 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -66,9 +66,9 @@ const avro = require('avsc'); // See reference documentation at // https://cloud.google.com/bigquery/docs/reference/storage -const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage'); +const {BigQueryReadClient} = require('@google-cloud/bigquery-storage'); -const client = new BigQueryStorageClient(); +const client = new BigQueryReadClient(); async function bigqueryStorageQuickstart() { // Get current project ID. The read session is created in this project. 
@@ -80,11 +80,7 @@ async function bigqueryStorageQuickstart() { const datasetId = 'usa_names'; const tableId = 'usa_1910_current'; - const tableReference = { - projectId, - datasetId, - tableId, - }; + const tableReference = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; const parent = `projects/${myProjectId}`; @@ -107,18 +103,15 @@ async function bigqueryStorageQuickstart() { // API request. const request = { - tableReference, parent, - readOptions, - tableModifiers, - // This API can also deliver data serialized in Apache Arrow format. - // This example leverages Apache Avro. - format: 'AVRO', - /* We use a LIQUID strategy in this example because we only read from a - * single stream. Consider BALANCED if you're consuming multiple streams - * concurrently and want more consistent stream sizes. - */ - shardingStrategy: 'LIQUID', + readSession: { + table: tableReference, + // This API can also deliver data serialized in Apache Arrow format. + // This example leverages Apache Avro. + dataFormat: 'AVRO', + readOptions, + tableModifiers, + }, }; const [session] = await client.createReadSession(request); @@ -134,16 +127,14 @@ async function bigqueryStorageQuickstart() { let offset = 0; const readRowsRequest = { - // Optional stream name or offset. Offset requested must be less than the last - // row read from readRows(). Requesting a larger offset is undefined. - readPosition: { - stream: session.streams[0], - offset, - }, + // Required stream name and optional offset. Offset requested must be less than + // the last row read from readRows(). Requesting a larger offset is undefined. + readStream: session.streams[0].name, + offset, }; const names = new Set(); - const states = {}; + const states = []; /* We'll use only a single stream for reading data from the table. Because * of dynamic sharding, this will yield all the rows in the table. 
However, @@ -154,26 +145,33 @@ async function bigqueryStorageQuickstart() { .readRows(readRowsRequest) .on('error', console.error) .on('data', data => { - try { - const decodedData = avroType.decode( - data.avroRows.serializedBinaryRows - ); + offset = data.avroRows.serializedBinaryRows.offset; - names.add(decodedData.value.name); - - if (!states[decodedData.value.state]) { - states[decodedData.value.state] = true; - } - - offset = decodedData.offset; + try { + // Decode all rows in buffer + let pos; + do { + const decodedData = avroType.decode( + data.avroRows.serializedBinaryRows, + pos + ); + + if (decodedData.value) { + names.add(decodedData.value.name); + + if (!states.includes(decodedData.value.state)) { + states.push(decodedData.value.state); + } + } + + pos = decodedData.offset; + } while (pos > 0); } catch (error) { console.log(error); } }) .on('end', () => { - console.log( - `Got ${names.size} unique names in states: ${Object.keys(states)}` - ); + console.log(`Got ${names.size} unique names in states: ${states}`); console.log(`Last offset: ${offset}`); }); } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 247e555cc14..7acfc9e587c 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "cf74b5fe028deb7c3cc9630d74ed89df62d266b0" + "sha": "d405f53a5fac9161012664d7f3f74e7dc6317476" } }, { From cc8d83aea10420a7144f7420befae6162d7970fa Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 21 Oct 2020 16:16:12 -0700 Subject: [PATCH 084/333] chore: clean up Node.js TOC for cloud-rad (#121) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/5f7bb900-b4ec-41d2-acea-c5c2f5dfb4fa/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/901ddd44e9ef7887ee681b9183bbdea99437fdcc Source-Link: https://github.com/googleapis/synthtool/commit/f96d3b455fe27c3dc7bc37c3c9cd27b1c6d269c8 --- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 4 ++++ handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 0d11b7ae951..7657be3377a 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -37,9 +37,13 @@ NAME=$(cat .repo-metadata.json | json name) mkdir ./_devsite cp ./yaml/$NAME/* ./_devsite +# Clean up TOC # Delete SharePoint item, see https://github.com/microsoft/rushstack/issues/1229 sed -i -e '1,3d' ./yaml/toc.yml sed -i -e 's/^ //' ./yaml/toc.yml +# Delete interfaces from TOC (name and uid) +sed -i -e '/name: I[A-Z]/{N;d;}' ./yaml/toc.yml +sed -i -e '/^ *\@google-cloud.*:interface/d' ./yaml/toc.yml cp ./yaml/toc.yml ./_devsite/toc.yml diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7acfc9e587c..f5b61381239 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "d405f53a5fac9161012664d7f3f74e7dc6317476" + "sha": "52d215f6d760856a4e0a18dc39489d2bb000bd79" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5451633881133e5573cc271a18e73b18caca8b1b" + "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" } } ], From 5c8ac3f0d52394d930f4abc7b93f156f4e500837 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 27 Oct 2020 11:12:11 -0700 Subject: [PATCH 085/333] docs: 
updated code of conduct (includes update to actions) (#124) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/6dcea365-b918-4c52-b1db-d7a62c956000/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/89c849ba5013e45e8fb688b138f33c2ec6083dc5 Source-Link: https://github.com/googleapis/synthtool/commit/a783321fd55f010709294455584a553f4b24b944 Source-Link: https://github.com/googleapis/synthtool/commit/b7413d38b763827c72c0360f0a3d286c84656eeb Source-Link: https://github.com/googleapis/synthtool/commit/5f6ef0ec5501d33c4667885b37a7685a30d41a76 --- .../bigquery-storage/CODE_OF_CONDUCT.md | 123 +++++++++++++----- handwritten/bigquery-storage/synth.metadata | 4 +- 2 files changed, 89 insertions(+), 38 deletions(-) diff --git a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md index 46b2a08ea6d..2add2547a81 100644 --- a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md +++ b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md @@ -1,43 +1,94 @@ -# Contributor Code of Conduct + +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. 
+In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. 
+ +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. 
+ +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index f5b61381239..db8fb159bf4 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "52d215f6d760856a4e0a18dc39489d2bb000bd79" + "sha": "725361b14d6bcb5d8ed64552d82687c2db4f4b4b" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" + "sha": "89c849ba5013e45e8fb688b138f33c2ec6083dc5" } } ], From 5d0207efa08ab204c7b198add0607674836193f3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 2 Nov 2020 15:59:03 -0800 Subject: [PATCH 086/333] build(node): add KOKORO_BUILD_ARTIFACTS_SUBDIR to env (#125) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/9f0ce149-63bd-4aad-917d-44f82c8912a6/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/ba9918cd22874245b55734f57470c719b577e591 --- handwritten/bigquery-storage/.kokoro/trampoline_v2.sh | 2 ++ handwritten/bigquery-storage/synth.metadata | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh index 5ae75f977d7..606d4321458 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh @@ -125,6 +125,8 @@ pass_down_envvars=( "TRAMPOLINE_CI" # Indicates the version of the script. 
"TRAMPOLINE_VERSION" + # Contains path to build artifacts being executed. + "KOKORO_BUILD_ARTIFACTS_SUBDIR" ) log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index db8fb159bf4..08f1c1064e3 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "725361b14d6bcb5d8ed64552d82687c2db4f4b4b" + "sha": "10c93ec771f1715a8f97dde1840ed5071275bec8" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "89c849ba5013e45e8fb688b138f33c2ec6083dc5" + "sha": "ba9918cd22874245b55734f57470c719b577e591" } } ], From 3dfb444d46e43a695237f33739263a3e04a03e60 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Fri, 6 Nov 2020 16:34:10 -0800 Subject: [PATCH 087/333] fix: do not modify options object, use defaultScopes (#126) Regenerated the library using [gapic-generator-typescript](https://github.com/googleapis/gapic-generator-typescript) v1.2.1. 
--- handwritten/bigquery-storage/package.json | 2 +- handwritten/bigquery-storage/src/index.ts | 2 + .../src/v1/big_query_read_client.ts | 89 ++++++++------- .../src/v1beta1/big_query_storage_client.ts | 101 +++++++++++------- handwritten/bigquery-storage/synth.metadata | 16 +-- .../system-test/fixtures/sample/src/index.ts | 9 +- .../bigquery-storage/system-test/install.ts | 18 ++-- 7 files changed, 141 insertions(+), 96 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d7f80ef108d..6f9873f5c5f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -29,7 +29,7 @@ "api-documenter": "api-documenter yaml --input-folder=temp" }, "dependencies": { - "google-gax": "^2.1.0" + "google-gax": "^2.9.2" }, "devDependencies": { "@types/mocha": "^8.0.0", diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 8716f5a6ad2..3c75be7738d 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -19,7 +19,9 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; const BigQueryReadClient = v1.BigQueryReadClient; +type BigQueryReadClient = v1.BigQueryReadClient; const BigQueryStorageClient = v1beta1.BigQueryStorageClient; +type BigQueryStorageClient = v1beta1.BigQueryStorageClient; export {v1, BigQueryReadClient, v1beta1, BigQueryStorageClient}; // For compatibility with JavaScript libraries we need to provide this default export: // tslint:disable-next-line no-default-export diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index dba8fffa0d0..d3cdbc68257 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -53,8 +53,10 @@ export class BigQueryReadClient { /** * Construct an instance of 
BigQueryReadClient. * - * @param {object} [options] - The configuration object. See the subsequent - * parameters for more details. + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] * @param {string} [options.credentials.private_key] @@ -74,42 +76,33 @@ export class BigQueryReadClient { * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - client configuration override. + * TODO(@alexander-fenster): link to gax documentation. + * @param {boolean} fallback - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. */ - constructor(opts?: ClientOptions) { - // Ensure that options include the service address and port. + // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryReadClient; const servicePath = - opts && opts.servicePath - ? opts.servicePath - : opts && opts.apiEndpoint - ? opts.apiEndpoint - : staticMembers.servicePath; - const port = opts && opts.port ? opts.port : staticMembers.port; - - if (!opts) { - opts = {servicePath, port}; + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? 
typeof window !== 'undefined'; + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; } - opts.servicePath = opts.servicePath || servicePath; - opts.port = opts.port || port; - - // users can override the config from client side, like retry codes name. - // The detailed structure of the clientConfig can be found here: https://github.com/googleapis/gax-nodejs/blob/master/src/gax.ts#L546 - // The way to override client config for Showcase API: - // - // const customConfig = {"interfaces": {"google.showcase.v1beta1.Echo": {"methods": {"Echo": {"retry_codes_name": "idempotent", "retry_params_name": "default"}}}}} - // const showcaseClient = new showcaseClient({ projectId, customConfig }); - opts.clientConfig = opts.clientConfig || {}; - - // If we're running in browser, it's OK to omit `fallback` since - // google-gax has `browser` field in its `package.json`. - // For Electron (which does not respect `browser` field), - // pass `{fallback: true}` to the BigQueryReadClient constructor. + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. this._gaxModule = opts.fallback ? gax.fallback : gax; - // Create a `gaxGrpc` object, with any grpc-specific options - // sent to the client. - opts.scopes = (this.constructor as typeof BigQueryReadClient).scopes; + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); // Save options to use in initialize() method. @@ -118,6 +111,11 @@ export class BigQueryReadClient { // Save the auth object to the client, for use by other methods. this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + // Set the default scopes in auth client if needed. 
+ if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + // Determine the client header string. const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; if (typeof process !== 'undefined' && 'versions' in process) { @@ -251,6 +249,7 @@ export class BigQueryReadClient { /** * The DNS address for this API service. + * @returns {string} The DNS address for this service. */ static get servicePath() { return 'bigquerystorage.googleapis.com'; @@ -259,6 +258,7 @@ export class BigQueryReadClient { /** * The DNS address for this API service - same as servicePath(), * exists for compatibility reasons. + * @returns {string} The DNS address for this service. */ static get apiEndpoint() { return 'bigquerystorage.googleapis.com'; @@ -266,6 +266,7 @@ export class BigQueryReadClient { /** * The port for this API service. + * @returns {number} The default port for this service. */ static get port() { return 443; @@ -274,6 +275,7 @@ export class BigQueryReadClient { /** * The scopes needed to make gRPC calls for every method defined * in this service. + * @returns {string[]} List of default scopes. */ static get scopes() { return [ @@ -287,8 +289,7 @@ export class BigQueryReadClient { getProjectId(callback: Callback): void; /** * Return the project ID used by this class. - * @param {function(Error, string)} callback - the callback to - * be called with the current project Id. + * @returns {Promise} A promise that resolves to string containing the project ID. */ getProjectId( callback?: Callback @@ -378,7 +379,11 @@ export class BigQueryReadClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1.ReadSession}. 
- * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.createReadSession(request); */ createReadSession( request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, @@ -491,7 +496,11 @@ export class BigQueryReadClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.splitReadStream(request); */ splitReadStream( request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, @@ -562,6 +571,13 @@ export class BigQueryReadClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * for more details and examples. + * @example + * const stream = client.readRows(request); + * stream.on('data', (response) => { ... }); + * stream.on('end', () => { ... 
}); */ readRows( request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, @@ -731,9 +747,10 @@ export class BigQueryReadClient { } /** - * Terminate the GRPC channel and close the client. + * Terminate the gRPC channel and close the client. * * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. */ close(): Promise { this.initialize(); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 8964e40f5c5..937f3012e43 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -53,8 +53,10 @@ export class BigQueryStorageClient { /** * Construct an instance of BigQueryStorageClient. * - * @param {object} [options] - The configuration object. See the subsequent - * parameters for more details. + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] * @param {string} [options.credentials.private_key] @@ -74,42 +76,33 @@ export class BigQueryStorageClient { * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - client configuration override. + * TODO(@alexander-fenster): link to gax documentation. + * @param {boolean} fallback - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. 
In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. */ - constructor(opts?: ClientOptions) { - // Ensure that options include the service address and port. + // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryStorageClient; const servicePath = - opts && opts.servicePath - ? opts.servicePath - : opts && opts.apiEndpoint - ? opts.apiEndpoint - : staticMembers.servicePath; - const port = opts && opts.port ? opts.port : staticMembers.port; - - if (!opts) { - opts = {servicePath, port}; + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? typeof window !== 'undefined'; + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; } - opts.servicePath = opts.servicePath || servicePath; - opts.port = opts.port || port; - - // users can override the config from client side, like retry codes name. - // The detailed structure of the clientConfig can be found here: https://github.com/googleapis/gax-nodejs/blob/master/src/gax.ts#L546 - // The way to override client config for Showcase API: - // - // const customConfig = {"interfaces": {"google.showcase.v1beta1.Echo": {"methods": {"Echo": {"retry_codes_name": "idempotent", "retry_params_name": "default"}}}}} - // const showcaseClient = new showcaseClient({ projectId, customConfig }); - opts.clientConfig = opts.clientConfig || {}; - - // If we're running in browser, it's OK to omit `fallback` since - // google-gax has `browser` field in its `package.json`. 
- // For Electron (which does not respect `browser` field), - // pass `{fallback: true}` to the BigQueryStorageClient constructor. + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. this._gaxModule = opts.fallback ? gax.fallback : gax; - // Create a `gaxGrpc` object, with any grpc-specific options - // sent to the client. - opts.scopes = (this.constructor as typeof BigQueryStorageClient).scopes; + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); // Save options to use in initialize() method. @@ -118,6 +111,11 @@ export class BigQueryStorageClient { // Save the auth object to the client, for use by other methods. this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + // Determine the client header string. const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; if (typeof process !== 'undefined' && 'versions' in process) { @@ -254,6 +252,7 @@ export class BigQueryStorageClient { /** * The DNS address for this API service. + * @returns {string} The DNS address for this service. */ static get servicePath() { return 'bigquerystorage.googleapis.com'; @@ -262,6 +261,7 @@ export class BigQueryStorageClient { /** * The DNS address for this API service - same as servicePath(), * exists for compatibility reasons. + * @returns {string} The DNS address for this service. */ static get apiEndpoint() { return 'bigquerystorage.googleapis.com'; @@ -269,6 +269,7 @@ export class BigQueryStorageClient { /** * The port for this API service. + * @returns {number} The default port for this service. */ static get port() { return 443; @@ -277,6 +278,7 @@ export class BigQueryStorageClient { /** * The scopes needed to make gRPC calls for every method defined * in this service. 
+ * @returns {string[]} List of default scopes. */ static get scopes() { return [ @@ -290,8 +292,7 @@ export class BigQueryStorageClient { getProjectId(callback: Callback): void; /** * Return the project ID used by this class. - * @param {function(Error, string)} callback - the callback to - * be called with the current project Id. + * @returns {Promise} A promise that resolves to string containing the project ID. */ getProjectId( callback?: Callback @@ -383,7 +384,11 @@ export class BigQueryStorageClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1beta1.ReadSession}. - * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.createReadSession(request); */ createReadSession( request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, @@ -485,7 +490,11 @@ export class BigQueryStorageClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.batchCreateReadSessionStreams(request); */ batchCreateReadSessionStreams( request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, @@ -596,7 +605,11 @@ export class BigQueryStorageClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.finalizeStream(request); */ finalizeStream( request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, @@ -710,7 +723,11 @@ export class BigQueryStorageClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example + * const [response] = await client.splitReadStream(request); */ splitReadStream( request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, @@ -781,6 +798,13 @@ export class BigQueryStorageClient { * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * for more details and examples. + * @example + * const stream = client.readRows(request); + * stream.on('data', (response) => { ... }); + * stream.on('end', () => { ... }); */ readRows( request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, @@ -928,9 +952,10 @@ export class BigQueryStorageClient { } /** - * Terminate the GRPC channel and close the client. + * Terminate the gRPC channel and close the client. * * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. */ close(): Promise { this.initialize(); diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 08f1c1064e3..45b45bd4e5f 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,23 +3,15 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "10c93ec771f1715a8f97dde1840ed5071275bec8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "4c5071b615d96ef9dfd6a63d8429090f1f2872bb", - "internalRef": "327369997" + "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", + "sha": "9b828c649de99a96418c1d580c6440016aa298a3" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ba9918cd22874245b55734f57470c719b577e591" + "sha": "1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b" } } ], @@ -96,6 +88,7 @@ "README.md", "api-extractor.json", 
"linkinator.config.json", + "package-lock.json.1985761200", "protos/google/cloud/bigquery/storage/v1/arrow.proto", "protos/google/cloud/bigquery/storage/v1/avro.proto", "protos/google/cloud/bigquery/storage/v1/storage.proto", @@ -110,6 +103,7 @@ "protos/protos.json", "renovate.json", "samples/README.md", + "samples/package-lock.json.1553614215", "src/v1/big_query_read_client.ts", "src/v1/big_query_read_client_config.json", "src/v1/big_query_read_proto_list.json", diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 5edf8f362ab..f6f97a14228 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -18,8 +18,15 @@ import {BigQueryReadClient} from '@google-cloud/bigquery-storage'; +// check that the client class type name can be used +function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { + client.close(); +} + function main() { - new BigQueryReadClient(); + // check that the client instance can be created + const bigQueryReadClient = new BigQueryReadClient(); + doStuffWithBigQueryReadClient(bigQueryReadClient); } main(); diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 4c1ba3eb79a..39d90f771de 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -20,32 +20,32 @@ import {packNTest} from 'pack-n-play'; import {readFileSync} from 'fs'; import {describe, it} from 'mocha'; -describe('typescript consumer tests', () => { - it('should have correct type signature for typescript users', async function () { +describe('📦 pack-n-play test', () => { + it('TypeScript code', async function () { this.timeout(300000); const options = { - packageDir: process.cwd(), // path to your module. 
+ packageDir: process.cwd(), sample: { - description: 'typescript based user can use the type definitions', + description: 'TypeScript user can use the type definitions', ts: readFileSync( './system-test/fixtures/sample/src/index.ts' ).toString(), }, }; - await packNTest(options); // will throw upon error. + await packNTest(options); }); - it('should have correct type signature for javascript users', async function () { + it('JavaScript code', async function () { this.timeout(300000); const options = { - packageDir: process.cwd(), // path to your module. + packageDir: process.cwd(), sample: { - description: 'typescript based user can use the type definitions', + description: 'JavaScript user can use the library', ts: readFileSync( './system-test/fixtures/sample/src/index.js' ).toString(), }, }; - await packNTest(options); // will throw upon error. + await packNTest(options); }); }); From f8bb608a81f96e73e4916d1f653f90f468dacd83 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Nov 2020 09:34:13 -0800 Subject: [PATCH 088/333] chore: release 2.2.2 (#127) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 99a9dffbcaf..052898fde8d 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.2.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.1...v2.2.2) (2020-11-07) + + +### Bug Fixes + +* do not modify options object, use defaultScopes ([#126](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/126)) 
([6f8eb24](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6f8eb244b1b06a928641550b2390e03964a14981)) + ### [2.2.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.0...v2.2.1) (2020-07-09) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6f9873f5c5f..b8ab8a9edeb 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.2.1", + "version": "2.2.2", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 4dfcde5dbaa6071d9c390867506496e5090a10f4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 23 Nov 2020 12:32:39 -0800 Subject: [PATCH 089/333] fix(browser): check for fetch on window --- .../bigquery-storage/protos/protos.json | 101 ++++++++++++++++-- .../src/v1/big_query_read_client.ts | 38 ++++--- .../src/v1beta1/big_query_storage_client.ts | 58 +++++----- handwritten/bigquery-storage/synth.metadata | 14 ++- 4 files changed, 159 insertions(+), 52 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index ca83026b195..2c7dbe17982 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -73,7 +73,18 @@ "(google.api.http).post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", "(google.api.http).body": "*", "(google.api.method_signature)": "parent,read_session,max_stream_count" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "parent,read_session,max_stream_count" + } + ] }, "ReadRows": { "requestType": "ReadRowsRequest", @@ -82,14 +93,31 @@ "options": { "(google.api.http).get": 
"/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}", "(google.api.method_signature)": "read_stream,offset" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "read_stream,offset" + } + ] }, "SplitReadStream": { "requestType": "SplitReadStreamRequest", "responseType": "SplitReadStreamResponse", "options": { "(google.api.http).get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + } + } + ] } } }, @@ -425,7 +453,22 @@ "(google.api.http).additional_bindings.post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", "(google.api.http).additional_bindings.body": "*", "(google.api.method_signature)": "table_reference,parent,requested_streams" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta1/{table_reference.project_id=projects/*}", + "body": "*", + "additional_bindings": { + "post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", + "body": "*" + } + } + }, + { + "(google.api.method_signature)": "table_reference,parent,requested_streams" + } + ] }, "ReadRows": { "requestType": "ReadRowsRequest", @@ -434,7 +477,17 @@ "options": { "(google.api.http).get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}", "(google.api.method_signature)": "read_position" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "read_position" + } + ] }, "BatchCreateReadSessionStreams": { "requestType": "BatchCreateReadSessionStreamsRequest", @@ -443,7 +496,18 @@ "(google.api.http).post": "/v1beta1/{session.name=projects/*/sessions/*}", "(google.api.http).body": "*", "(google.api.method_signature)": "session,requested_streams" - } + }, + 
"parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta1/{session.name=projects/*/sessions/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "session,requested_streams" + } + ] }, "FinalizeStream": { "requestType": "FinalizeStreamRequest", @@ -452,7 +516,18 @@ "(google.api.http).post": "/v1beta1/{stream.name=projects/*/streams/*}", "(google.api.http).body": "*", "(google.api.method_signature)": "stream" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta1/{stream.name=projects/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "stream" + } + ] }, "SplitReadStream": { "requestType": "SplitReadStreamRequest", @@ -460,7 +535,17 @@ "options": { "(google.api.http).get": "/v1beta1/{original_stream.name=projects/*/streams/*}", "(google.api.method_signature)": "original_stream" - } + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1beta1/{original_stream.name=projects/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "original_stream" + } + ] } } }, diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index d3cdbc68257..12bf9595d6b 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -16,11 +16,17 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** +/* global window */ import * as gax from 'google-gax'; import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; import * as protos from '../../protos/protos'; +/** + * Client JSON configuration object, loaded from + * `src/v1/big_query_read_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ import * as gapicConfig from './big_query_read_client_config.json'; const version = require('../../../package.json').version; @@ -76,9 +82,9 @@ export class BigQueryReadClient { * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - client configuration override. - * TODO(@alexander-fenster): link to gax documentation. - * @param {boolean} fallback - Use HTTP fallback mode. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. * In fallback mode, a special browser-compatible transport implementation is used * instead of gRPC transport. In browser context (if the `window` object is defined) * the fallback mode is enabled automatically; set `options.fallback` to `false` @@ -91,7 +97,9 @@ export class BigQueryReadClient { opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; const port = opts?.port || staticMembers.port; const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? typeof window !== 'undefined'; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
@@ -306,7 +314,7 @@ export class BigQueryReadClient { // ------------------- createReadSession( request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.cloud.bigquery.storage.v1.IReadSession, @@ -319,7 +327,7 @@ export class BigQueryReadClient { >; createReadSession( request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.cloud.bigquery.storage.v1.IReadSession, | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest @@ -388,7 +396,7 @@ export class BigQueryReadClient { createReadSession( request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.cloud.bigquery.storage.v1.IReadSession, | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest @@ -414,12 +422,12 @@ export class BigQueryReadClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -434,7 +442,7 @@ export class BigQueryReadClient { } splitReadStream( request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, @@ -447,7 +455,7 @@ export class BigQueryReadClient { >; splitReadStream( request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, | 
protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest @@ -505,7 +513,7 @@ export class BigQueryReadClient { splitReadStream( request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest @@ -531,12 +539,12 @@ export class BigQueryReadClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -581,7 +589,7 @@ export class BigQueryReadClient { */ readRows( request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, - options?: gax.CallOptions + options?: CallOptions ): gax.CancellableStream { request = request || {}; options = options || {}; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 937f3012e43..23912e589cc 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -16,11 +16,17 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** +/* global window */ import * as gax from 'google-gax'; import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; import * as protos from '../../protos/protos'; +/** + * Client JSON configuration object, loaded from + * `src/v1beta1/big_query_storage_client_config.json`. 
+ * This file defines retry strategy and timeouts for all API methods in this library. + */ import * as gapicConfig from './big_query_storage_client_config.json'; const version = require('../../../package.json').version; @@ -76,9 +82,9 @@ export class BigQueryStorageClient { * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - client configuration override. - * TODO(@alexander-fenster): link to gax documentation. - * @param {boolean} fallback - Use HTTP fallback mode. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. * In fallback mode, a special browser-compatible transport implementation is used * instead of gRPC transport. In browser context (if the `window` object is defined) * the fallback mode is enabled automatically; set `options.fallback` to `false` @@ -91,7 +97,9 @@ export class BigQueryStorageClient { opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; const port = opts?.port || staticMembers.port; const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? typeof window !== 'undefined'; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
@@ -309,7 +317,7 @@ export class BigQueryStorageClient { // ------------------- createReadSession( request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IReadSession, @@ -322,7 +330,7 @@ export class BigQueryStorageClient { >; createReadSession( request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.cloud.bigquery.storage.v1beta1.IReadSession, | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest @@ -393,7 +401,7 @@ export class BigQueryStorageClient { createReadSession( request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.cloud.bigquery.storage.v1beta1.IReadSession, | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest @@ -419,12 +427,12 @@ export class BigQueryStorageClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -440,7 +448,7 @@ export class BigQueryStorageClient { } batchCreateReadSessionStreams( request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, @@ -453,7 +461,7 @@ export class BigQueryStorageClient { >; batchCreateReadSessionStreams( request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - 
options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest @@ -499,7 +507,7 @@ export class BigQueryStorageClient { batchCreateReadSessionStreams( request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest @@ -525,12 +533,12 @@ export class BigQueryStorageClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -549,7 +557,7 @@ export class BigQueryStorageClient { } finalizeStream( request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.protobuf.IEmpty, @@ -562,7 +570,7 @@ export class BigQueryStorageClient { >; finalizeStream( request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.protobuf.IEmpty, | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest @@ -614,7 +622,7 @@ export class BigQueryStorageClient { finalizeStream( request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.protobuf.IEmpty, | 
protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest @@ -640,12 +648,12 @@ export class BigQueryStorageClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -660,7 +668,7 @@ export class BigQueryStorageClient { } splitReadStream( request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options?: gax.CallOptions + options?: CallOptions ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, @@ -673,7 +681,7 @@ export class BigQueryStorageClient { >; splitReadStream( request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options: gax.CallOptions, + options: CallOptions, callback: Callback< protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest @@ -732,7 +740,7 @@ export class BigQueryStorageClient { splitReadStream( request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, optionsOrCallback?: - | gax.CallOptions + | CallOptions | Callback< protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest @@ -758,12 +766,12 @@ export class BigQueryStorageClient { ] > | void { request = request || {}; - let options: gax.CallOptions; + let options: CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; } else { - options = optionsOrCallback as gax.CallOptions; + options = optionsOrCallback as CallOptions; } options = options || {}; options.otherArgs = options.otherArgs || {}; @@ 
-808,7 +816,7 @@ export class BigQueryStorageClient { */ readRows( request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, - options?: gax.CallOptions + options?: CallOptions ): gax.CancellableStream { request = request || {}; options = options || {}; diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 45b45bd4e5f..a626ba33fb3 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -3,8 +3,16 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/nodejs-bigquery-storage.git", - "sha": "9b828c649de99a96418c1d580c6440016aa298a3" + "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", + "sha": "9d0131d14ff5512b3bf27c4d68535f4449e7b5bb" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "2f019bf70bfe06f1e2af1b04011b0a2405190e43", + "internalRef": "343202295" } }, { @@ -88,7 +96,6 @@ "README.md", "api-extractor.json", "linkinator.config.json", - "package-lock.json.1985761200", "protos/google/cloud/bigquery/storage/v1/arrow.proto", "protos/google/cloud/bigquery/storage/v1/avro.proto", "protos/google/cloud/bigquery/storage/v1/storage.proto", @@ -103,7 +110,6 @@ "protos/protos.json", "renovate.json", "samples/README.md", - "samples/package-lock.json.1553614215", "src/v1/big_query_read_client.ts", "src/v1/big_query_read_client_config.json", "src/v1/big_query_read_proto_list.json", From a17ce159c32c88b0f20e6dc06d70002fdb5c8419 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 25 Nov 2020 08:34:58 -0800 Subject: [PATCH 090/333] docs: spelling correction for "targetting" (#131) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/c503f640-90ae-4547-bcc8-a154d32c609e/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/15013eff642a7e7e855aed5a29e6e83c39beba2a --- handwritten/bigquery-storage/README.md | 2 +- handwritten/bigquery-storage/synth.metadata | 87 +-------------------- 2 files changed, 3 insertions(+), 86 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 3a07845c979..b7ac445dad1 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -200,7 +200,7 @@ Our client libraries follow the [Node.js release schedule](https://nodejs.org/en Libraries are compatible with all current _active_ and _maintenance_ versions of Node.js. -Client libraries targetting some end-of-life versions of Node.js are available, and +Client libraries targeting some end-of-life versions of Node.js are available, and can be installed via npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). The dist-tags follow the naming convention `legacy-(version)`. diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index a626ba33fb3..8b90f7e79bf 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "9d0131d14ff5512b3bf27c4d68535f4449e7b5bb" + "sha": "d837dfc841cf3e77fbc2482dbabb149e2fc4f76a" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b" + "sha": "15013eff642a7e7e855aed5a29e6e83c39beba2a" } } ], @@ -42,88 +42,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".eslintignore", - ".eslintrc.json", - ".gitattributes", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - 
".github/release-please.yml", - ".github/workflows/ci.yaml", - ".gitignore", - ".jsdoc.js", - ".kokoro/.gitattributes", - ".kokoro/common.cfg", - ".kokoro/continuous/node10/common.cfg", - ".kokoro/continuous/node10/docs.cfg", - ".kokoro/continuous/node10/test.cfg", - ".kokoro/continuous/node12/common.cfg", - ".kokoro/continuous/node12/lint.cfg", - ".kokoro/continuous/node12/samples-test.cfg", - ".kokoro/continuous/node12/system-test.cfg", - ".kokoro/continuous/node12/test.cfg", - ".kokoro/docs.sh", - ".kokoro/lint.sh", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/node10/common.cfg", - ".kokoro/presubmit/node12/common.cfg", - ".kokoro/presubmit/node12/samples-test.cfg", - ".kokoro/presubmit/node12/system-test.cfg", - ".kokoro/presubmit/node12/test.cfg", - ".kokoro/publish.sh", - ".kokoro/release/docs-devsite.cfg", - ".kokoro/release/docs-devsite.sh", - ".kokoro/release/docs.cfg", - ".kokoro/release/docs.sh", - ".kokoro/release/publish.cfg", - ".kokoro/samples-test.sh", - ".kokoro/system-test.sh", - ".kokoro/test.bat", - ".kokoro/test.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".mocharc.js", - ".nycrc", - ".prettierignore", - ".prettierrc.js", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.md", - "LICENSE", - "README.md", - "api-extractor.json", - "linkinator.config.json", - "protos/google/cloud/bigquery/storage/v1/arrow.proto", - "protos/google/cloud/bigquery/storage/v1/avro.proto", - "protos/google/cloud/bigquery/storage/v1/storage.proto", - "protos/google/cloud/bigquery/storage/v1/stream.proto", - "protos/google/cloud/bigquery/storage/v1beta1/arrow.proto", - "protos/google/cloud/bigquery/storage/v1beta1/avro.proto", - "protos/google/cloud/bigquery/storage/v1beta1/read_options.proto", - "protos/google/cloud/bigquery/storage/v1beta1/storage.proto", - "protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto", - "protos/protos.d.ts", - "protos/protos.js", - "protos/protos.json", - "renovate.json", - 
"samples/README.md", - "src/v1/big_query_read_client.ts", - "src/v1/big_query_read_client_config.json", - "src/v1/big_query_read_proto_list.json", - "src/v1/index.ts", - "src/v1beta1/big_query_storage_client.ts", - "src/v1beta1/big_query_storage_client_config.json", - "src/v1beta1/big_query_storage_proto_list.json", - "src/v1beta1/index.ts", - "system-test/fixtures/sample/src/index.js", - "system-test/fixtures/sample/src/index.ts", - "system-test/install.ts", - "test/gapic_big_query_read_v1.ts", - "test/gapic_big_query_storage_v1beta1.ts", - "tsconfig.json", - "webpack.config.js" ] } \ No newline at end of file From 01c52b52c89289886bdca318902183cd747d5d4d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 2 Dec 2020 10:38:41 -0800 Subject: [PATCH 091/333] chore: release 2.2.3 (#130) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 052898fde8d..112ba679277 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.2.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.2...v2.2.3) (2020-11-25) + + +### Bug Fixes + +* **browser:** check for fetch on window ([d837dfc](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/d837dfc841cf3e77fbc2482dbabb149e2fc4f76a)) + ### [2.2.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.1...v2.2.2) (2020-11-07) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index b8ab8a9edeb..e9159681af6 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.2.2", + "version": "2.2.3", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 16f760cd4d08f3378b5bf62ab961e7a7acf4f55e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 4 Dec 2020 08:56:30 -0800 Subject: [PATCH 092/333] chore: generate GAPIC metadata JSON file (#132) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/464906e9-6f3c-41d1-906e-fd7073c42d19/targets - [ ] To automatically regenerate this PR, check this box. PiperOrigin-RevId: 345596855 Source-Link: https://github.com/googleapis/googleapis/commit/d189e871205fea665a9648f7c4676f027495ccaf --- .../src/v1/gapic_metadata.json | 48 +++++++++++++ .../src/v1beta1/gapic_metadata.json | 68 +++++++++++++++++++ handwritten/bigquery-storage/synth.metadata | 6 +- 3 files changed, 119 insertions(+), 3 deletions(-) create mode 100644 handwritten/bigquery-storage/src/v1/gapic_metadata.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json diff --git a/handwritten/bigquery-storage/src/v1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1/gapic_metadata.json new file mode 100644 index 00000000000..3c84159f688 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/gapic_metadata.json @@ -0,0 +1,48 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1", + "libraryPackage": "@google-cloud/bigquery-storage", + "services": { + "BigQueryRead": { + "clients": { + "grpc": { + "libraryClient": "BigQueryReadClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" 
+ ] + }, + "ReadRows": { + "methods": [ + "readRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BigQueryReadClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json new file mode 100644 index 00000000000..00d888bf605 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json @@ -0,0 +1,68 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1beta1", + "libraryPackage": "@google-cloud/bigquery-storage", + "services": { + "BigQueryStorage": { + "clients": { + "grpc": { + "libraryClient": "BigQueryStorageClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "BatchCreateReadSessionStreams": { + "methods": [ + "batchCreateReadSessionStreams" + ] + }, + "FinalizeStream": { + "methods": [ + "finalizeStream" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + }, + "ReadRows": { + "methods": [ + "readRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BigQueryStorageClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "BatchCreateReadSessionStreams": { + "methods": [ + "batchCreateReadSessionStreams" + ] + }, + "FinalizeStream": { + "methods": [ + "finalizeStream" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 8b90f7e79bf..70b1e3840bc 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,15 +4,15 @@ "git": { 
"name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "d837dfc841cf3e77fbc2482dbabb149e2fc4f76a" + "sha": "4f2862e8145c3cfde1bd97ccfd7cbc666ad18f83" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2f019bf70bfe06f1e2af1b04011b0a2405190e43", - "internalRef": "343202295" + "sha": "d189e871205fea665a9648f7c4676f027495ccaf", + "internalRef": "345596855" } }, { From adc965282831d13b28760687baf0a9889c71d743 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 22 Dec 2020 11:46:23 -0800 Subject: [PATCH 093/333] docs: add instructions for authenticating for system tests (#133) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/9943226b-8e9b-44fd-b025-446fc572242a/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/363fe305e9ce34a6cd53951c6ee5f997094b54ee --- handwritten/bigquery-storage/CONTRIBUTING.md | 15 +++++++++++++-- handwritten/bigquery-storage/README.md | 3 +-- handwritten/bigquery-storage/synth.metadata | 4 ++-- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/CONTRIBUTING.md b/handwritten/bigquery-storage/CONTRIBUTING.md index f6c4cf010e3..3281e44c984 100644 --- a/handwritten/bigquery-storage/CONTRIBUTING.md +++ b/handwritten/bigquery-storage/CONTRIBUTING.md @@ -37,6 +37,15 @@ accept your pull requests. 1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. 1. Submit a pull request. +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable billing for your project][billing]. +1. [Enable the Google BigQuery Storage API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + + ## Running the tests 1. 
[Prepare your environment for Node.js setup][setup]. @@ -51,11 +60,9 @@ accept your pull requests. npm test # Run sample integration tests. - gcloud auth application-default login npm run samples-test # Run all system tests. - gcloud auth application-default login npm run system-test 1. Lint (and maybe fix) any changes: @@ -63,3 +70,7 @@ accept your pull requests. npm run fix [setup]: https://cloud.google.com/nodejs/docs/setup +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started \ No newline at end of file diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index b7ac445dad1..c562b19865d 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -182,8 +182,7 @@ async function bigqueryStorageQuickstart() { ## Samples -Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples) directory. The samples' `README.md` -has instructions for running the samples. +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples) directory. Each sample's `README.md` has instructions for running its sample. 
| Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 70b1e3840bc..0c2dadf2c36 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "4f2862e8145c3cfde1bd97ccfd7cbc666ad18f83" + "sha": "0ce572e13d2b82409d4e187f7f2b5e1171cfbcc5" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "15013eff642a7e7e855aed5a29e6e83c39beba2a" + "sha": "363fe305e9ce34a6cd53951c6ee5f997094b54ee" } } ], From c125994f0a3519d499f4ee3cdc2c31b57a35ce98 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 1 Jan 2021 19:41:38 -0800 Subject: [PATCH 094/333] chore: update license headers (#134) --- handwritten/bigquery-storage/.jsdoc.js | 4 ++-- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- handwritten/bigquery-storage/src/v1/big_query_read_client.ts | 2 +- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../bigquery-storage/src/v1beta1/big_query_storage_client.ts | 2 +- handwritten/bigquery-storage/src/v1beta1/index.ts | 2 +- handwritten/bigquery-storage/synth.metadata | 2 +- .../bigquery-storage/system-test/fixtures/sample/src/index.js | 2 +- .../bigquery-storage/system-test/fixtures/sample/src/index.ts | 2 +- handwritten/bigquery-storage/system-test/install.ts | 2 +- handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts | 2 +- .../bigquery-storage/test/gapic_big_query_storage_v1beta1.ts | 2 +- 13 files changed, 14 insertions(+), 14 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index ac0f1658a27..9451ad92fe9 100644 --- 
a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2020 Google LLC', + copyright: 'Copyright 2021 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index f94b251defa..a6882c87ab3 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 0927a833d1a..ea053058b23 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 12bf9595d6b..a5d7db865c2 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index c882095873b..db2de17014f 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 23912e589cc..da6f81c04e4 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index 9b3c5a12d86..2c9b3f3f526 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 0c2dadf2c36..c547fdd0c6d 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "0ce572e13d2b82409d4e187f7f2b5e1171cfbcc5" + "sha": "89ed81d984c38439e885248cb971473ce9887a8a" } }, { diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index a0f6f03ee57..627efaa9f20 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index f6f97a14228..3b55b605b29 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 39d90f771de..d2d61c0396f 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index fb204c5c622..6d69543ebaf 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 117757e569b..471cebc8ec7 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From b4aa9feb39141a6fb7d3a11a4ffd2f6a90db7e3e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 8 Jan 2021 18:42:17 -0800 Subject: [PATCH 095/333] feat: introduce style enumeration (#135) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/99ff624a-f7e4-4941-b7ed-794cee704ba0/targets - [ ] To automatically regenerate this PR, check this box. --- .../bigquery-storage/protos/protos.d.ts | 12 +++ handwritten/bigquery-storage/protos/protos.js | 78 ++++++++++++++++++- .../bigquery-storage/protos/protos.json | 13 +++- handwritten/bigquery-storage/synth.metadata | 2 +- 4 files changed, 102 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index a6882c87ab3..9a211eb7968 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4300,6 +4300,9 @@ export namespace google { /** ResourceDescriptor singular */ singular?: (string|null); + + /** ResourceDescriptor style */ + style?: (google.api.ResourceDescriptor.Style[]|null); } /** Represents a ResourceDescriptor. */ @@ -4329,6 +4332,9 @@ export namespace google { /** ResourceDescriptor singular. */ public singular: string; + /** ResourceDescriptor style. 
*/ + public style: google.api.ResourceDescriptor.Style[]; + /** * Creates a new ResourceDescriptor instance using the specified properties. * @param [properties] Properties to set @@ -4408,6 +4414,12 @@ export namespace google { ORIGINALLY_SINGLE_PATTERN = 1, FUTURE_MULTI_PATTERN = 2 } + + /** Style enum. */ + enum Style { + STYLE_UNSPECIFIED = 0, + DECLARATIVE_FRIENDLY = 1 + } } /** Properties of a ResourceReference. */ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index ea053058b23..25fff2f0dcb 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -10124,6 +10124,7 @@ * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history * @property {string|null} [plural] ResourceDescriptor plural * @property {string|null} [singular] ResourceDescriptor singular + * @property {Array.|null} [style] ResourceDescriptor style */ /** @@ -10136,6 +10137,7 @@ */ function ResourceDescriptor(properties) { this.pattern = []; + this.style = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -10190,6 +10192,14 @@ */ ResourceDescriptor.prototype.singular = ""; + /** + * ResourceDescriptor style. + * @member {Array.} style + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.style = $util.emptyArray; + /** * Creates a new ResourceDescriptor instance using the specified properties. 
* @function create @@ -10227,6 +10237,12 @@ writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); + if (message.style != null && message.style.length) { + writer.uint32(/* id 10, wireType 2 =*/82).fork(); + for (var i = 0; i < message.style.length; ++i) + writer.int32(message.style[i]); + writer.ldelim(); + } return writer; }; @@ -10281,6 +10297,16 @@ case 6: message.singular = reader.string(); break; + case 10: + if (!(message.style && message.style.length)) + message.style = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.style.push(reader.int32()); + } else + message.style.push(reader.int32()); + break; default: reader.skipType(tag & 7); break; @@ -10344,6 +10370,18 @@ if (message.singular != null && message.hasOwnProperty("singular")) if (!$util.isString(message.singular)) return "singular: string expected"; + if (message.style != null && message.hasOwnProperty("style")) { + if (!Array.isArray(message.style)) + return "style: array expected"; + for (var i = 0; i < message.style.length; ++i) + switch (message.style[i]) { + default: + return "style: enum value[] expected"; + case 0: + case 1: + break; + } + } return null; }; @@ -10388,6 +10426,23 @@ message.plural = String(object.plural); if (object.singular != null) message.singular = String(object.singular); + if (object.style) { + if (!Array.isArray(object.style)) + throw TypeError(".google.api.ResourceDescriptor.style: array expected"); + message.style = []; + for (var i = 0; i < object.style.length; ++i) + switch (object.style[i]) { + default: + case "STYLE_UNSPECIFIED": + case 0: + message.style[i] = 0; + break; + case "DECLARATIVE_FRIENDLY": + case 1: + message.style[i] = 1; + break; + } + } return message; }; @@ -10404,8 +10459,10 @@ if (!options) options = {}; var object = {}; - if 
(options.arrays || options.defaults) + if (options.arrays || options.defaults) { object.pattern = []; + object.style = []; + } if (options.defaults) { object.type = ""; object.nameField = ""; @@ -10428,6 +10485,11 @@ object.plural = message.plural; if (message.singular != null && message.hasOwnProperty("singular")) object.singular = message.singular; + if (message.style && message.style.length) { + object.style = []; + for (var j = 0; j < message.style.length; ++j) + object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; + } return object; }; @@ -10458,6 +10520,20 @@ return values; })(); + /** + * Style enum. + * @name google.api.ResourceDescriptor.Style + * @enum {number} + * @property {number} STYLE_UNSPECIFIED=0 STYLE_UNSPECIFIED value + * @property {number} DECLARATIVE_FRIENDLY=1 DECLARATIVE_FRIENDLY value + */ + ResourceDescriptor.Style = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STYLE_UNSPECIFIED"] = 0; + values[valuesById[1] = "DECLARATIVE_FRIENDLY"] = 1; + return values; + })(); + return ResourceDescriptor; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 2c7dbe17982..c3e23963a79 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1026,6 +1026,11 @@ "singular": { "type": "string", "id": 6 + }, + "style": { + "rule": "repeated", + "type": "Style", + "id": 10 } }, "nested": { @@ -1035,6 +1040,12 @@ "ORIGINALLY_SINGLE_PATTERN": 1, "FUTURE_MULTI_PATTERN": 2 } + }, + "Style": { + "values": { + "STYLE_UNSPECIFIED": 0, + "DECLARATIVE_FRIENDLY": 1 + } } } }, @@ -1054,7 +1065,7 @@ }, "protobuf": { "options": { - "go_package": "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor", + "go_package": "google.golang.org/protobuf/types/descriptorpb", "java_package": "com.google.protobuf", 
"java_outer_classname": "DescriptorProtos", "csharp_namespace": "Google.Protobuf.Reflection", diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index c547fdd0c6d..cedad127a34 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "89ed81d984c38439e885248cb971473ce9887a8a" + "sha": "89e1d811c8e0b5282519807b0ea1cdd3afc0ea87" } }, { From 55342a9c0bf41456b7a3a1e9be44a46a1c1f545f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Jan 2021 18:36:35 +0000 Subject: [PATCH 096/333] chore: release 2.3.0 (#136) :robot: I have created a release \*beep\* \*boop\* --- ## [2.3.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.3...v2.3.0) (2021-01-09) ### Features * introduce style enumeration ([#135](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/135)) ([4a8f699](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/4a8f699472d67aae4300c458308c2fa4fa372592)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 112ba679277..00c433bccd8 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.3.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.3...v2.3.0) (2021-01-09) + + +### Features + +* introduce style enumeration ([#135](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/135)) ([4a8f699](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/4a8f699472d67aae4300c458308c2fa4fa372592)) + ### [2.2.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.2...v2.2.3) (2020-11-25) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index e9159681af6..d2e99ee50f9 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.2.3", + "version": "2.3.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From fb4e00196b7872b07e9615ecf5086dfc0f4db23b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 27 Jan 2021 08:42:27 -0800 Subject: [PATCH 097/333] refactor(nodejs): move build cop to flakybot (#139) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/dfbad313-7afb-4cf6-b229-0476fcc2130c/targets - [ ] To automatically regenerate this PR, check this box. 
Source-Link: https://github.com/googleapis/synthtool/commit/57c23fa5705499a4181095ced81f0ee0933b64f6 --- handwritten/bigquery-storage/.kokoro/samples-test.sh | 6 +++--- handwritten/bigquery-storage/.kokoro/system-test.sh | 6 +++--- handwritten/bigquery-storage/.kokoro/test.sh | 6 +++--- handwritten/bigquery-storage/.kokoro/trampoline_v2.sh | 2 +- handwritten/bigquery-storage/synth.metadata | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index bab7ba4e967..950f8483428 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -39,14 +39,14 @@ if [ -f samples/package.json ]; then npm link ../ npm install cd .. - # If tests are running against master, configure Build Cop + # If tests are running against master, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot } trap cleanup EXIT HUP fi diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 8a08400484a..319d1e0eda8 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -33,14 +33,14 @@ fi npm install -# If tests are running against master, configure Build Cop +# If tests are running against master, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export 
MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot } trap cleanup EXIT HUP fi diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 5be385fef64..5d6383fcb78 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -21,14 +21,14 @@ export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. npm install -# If tests are running against master, configure Build Cop +# If tests are running against master, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml export MOCHA_REPORTER=xunit cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot } trap cleanup EXIT HUP fi diff --git a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh index 606d4321458..4d03112128a 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh @@ -162,7 +162,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For flakybot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index cedad127a34..089f2da19a6 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 
+4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "89e1d811c8e0b5282519807b0ea1cdd3afc0ea87" + "sha": "9079ae02c578d3dce53abf04984bbe67aa053236" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "363fe305e9ce34a6cd53951c6ee5f997094b54ee" + "sha": "57c23fa5705499a4181095ced81f0ee0933b64f6" } } ], From 251b0d466b0e6a1686d0f795d21f91af96358348 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Tue, 2 Feb 2021 17:52:22 -0800 Subject: [PATCH 098/333] chore: update CODEOWNERS config (#140) --- .../bigquery-storage/.repo-metadata.json | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index 7ae251898a0..1c173eeacf9 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -1,13 +1,14 @@ { - "name": "bigquerystorage", - "name_pretty": "Google BigQuery Storage", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", - "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", - "issue_tracker": "https://b.corp.google.com/savedsearches/559654", - "release_level": "ga", - "language": "nodejs", - "repo": "googleapis/nodejs-bigquery-storage", - "distribution_name": "@google-cloud/bigquery-storage", - "api_id": "bigquerystorage.googleapis.com", - "requires_billing": true - } + "name": "bigquerystorage", + "name_pretty": "Google BigQuery Storage", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", + "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", + "issue_tracker": "https://b.corp.google.com/savedsearches/559654", + "release_level": "ga", + "language": "nodejs", + "repo": "googleapis/nodejs-bigquery-storage", + "distribution_name": 
"@google-cloud/bigquery-storage", + "api_id": "bigquerystorage.googleapis.com", + "requires_billing": true, + "codeowner_team": "@googleapis/api-bigquery" +} From f37d7d36b29070ad26a9cc20078e75a9cf60b47c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Feb 2021 18:06:14 -0800 Subject: [PATCH 099/333] build: adds UNORDERED_LIST enum (#141) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/3c80116f-2a08-4668-9798-3d8677b45629/targets - [ ] To automatically regenerate this PR, check this box. --- handwritten/bigquery-storage/protos/protos.d.ts | 3 ++- handwritten/bigquery-storage/protos/protos.js | 7 +++++++ handwritten/bigquery-storage/protos/protos.json | 3 ++- handwritten/bigquery-storage/synth.metadata | 2 +- 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 9a211eb7968..e2bf17d7e22 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4277,7 +4277,8 @@ export namespace google { REQUIRED = 2, OUTPUT_ONLY = 3, INPUT_ONLY = 4, - IMMUTABLE = 5 + IMMUTABLE = 5, + UNORDERED_LIST = 6 } /** Properties of a ResourceDescriptor. 
*/ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 25fff2f0dcb..70c91d583af 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -10100,6 +10100,7 @@ * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value * @property {number} INPUT_ONLY=4 INPUT_ONLY value * @property {number} IMMUTABLE=5 IMMUTABLE value + * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value */ api.FieldBehavior = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -10109,6 +10110,7 @@ values[valuesById[3] = "OUTPUT_ONLY"] = 3; values[valuesById[4] = "INPUT_ONLY"] = 4; values[valuesById[5] = "IMMUTABLE"] = 5; + values[valuesById[6] = "UNORDERED_LIST"] = 6; return values; })(); @@ -16276,6 +16278,7 @@ case 3: case 4: case 5: + case 6: break; } } @@ -16376,6 +16379,10 @@ case 5: message[".google.api.fieldBehavior"][i] = 5; break; + case "UNORDERED_LIST": + case 6: + message[".google.api.fieldBehavior"][i] = 6; + break; } } if (object[".google.api.resourceReference"] != null) { diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index c3e23963a79..e959d1cef26 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -981,7 +981,8 @@ "REQUIRED": 2, "OUTPUT_ONLY": 3, "INPUT_ONLY": 4, - "IMMUTABLE": 5 + "IMMUTABLE": 5, + "UNORDERED_LIST": 6 } }, "resourceReference": { diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 089f2da19a6..7ffc39ae2c0 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "9079ae02c578d3dce53abf04984bbe67aa053236" + "sha": "9606545d68bf4fd62ec8c3983e1ca85db10e7bc5" } }, { From 
7b0f9f76cd19f45038ad373097f086ad8accbeda Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sun, 7 Mar 2021 09:00:36 -0800 Subject: [PATCH 100/333] build: update gapic-generator-typescript to v1.2.10. (#142) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/9f8e7599-f87d-4fac-8f3a-a2e1781c33be/targets - [ ] To automatically regenerate this PR, check this box. PiperOrigin-RevId: 361273630 Source-Link: https://github.com/googleapis/googleapis/commit/5477122b3e8037a1dc5bc920536158edbd151dc4 --- handwritten/bigquery-storage/synth.metadata | 6 +++--- handwritten/bigquery-storage/webpack.config.js | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index 7ffc39ae2c0..b4392c2cc14 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "9606545d68bf4fd62ec8c3983e1ca85db10e7bc5" + "sha": "2790786fb23c579b7a1ca7a3bdf91f375786f16e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d189e871205fea665a9648f7c4676f027495ccaf", - "internalRef": "345596855" + "sha": "5477122b3e8037a1dc5bc920536158edbd151dc4", + "internalRef": "361273630" } }, { diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js index 215b4b9acb9..de163617408 100644 --- a/handwritten/bigquery-storage/webpack.config.js +++ b/handwritten/bigquery-storage/webpack.config.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
From e4d8a8b77f19d93cce280719ee62786325bf51c4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Mar 2021 17:52:25 +0100 Subject: [PATCH 101/333] chore(deps): update dependency sinon to v10 (#144) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [sinon](https://sinonjs.org/) ([source](https://togithub.com/sinonjs/sinon)) | [`^9.0.1` -> `^10.0.0`](https://renovatebot.com/diffs/npm/sinon/9.2.4/10.0.0) | [![age](https://badges.renovateapi.com/packages/npm/sinon/10.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/sinon/10.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/sinon/10.0.0/compatibility-slim/9.2.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/sinon/10.0.0/confidence-slim/9.2.4)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
sinonjs/sinon ### [`v10.0.0`](https://togithub.com/sinonjs/sinon/blob/master/CHANGELOG.md#​1000--2021-03-22) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v9.2.4...v10.0.0) ================== - Upgrade nise to 4.1.0 - Use [@​sinonjs/eslint-config](https://togithub.com/sinonjs/eslint-config)[@​4](https://togithub.com/4) => Adopts ES2017 => Drops support for IE 11, Legacy Edge and legacy Safari
--- ### Renovate configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d2e99ee50f9..9ae410a2959 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -44,7 +44,7 @@ "mocha": "^8.0.0", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^9.0.1", + "sinon": "^10.0.0", "ts-loader": "^8.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", From 39317b3a00d0db72fea50f94f6528c23e2573ac5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 30 Mar 2021 10:10:17 -0700 Subject: [PATCH 102/333] feat: add a Arrow compression options (Only LZ4 for now). feat: Return schema on first ReadRowsResponse doc: clarify limit on filter string. 
(#145) Committer: @emkornfield PiperOrigin-RevId: 365759522 Source-Author: Google APIs Source-Date: Tue Mar 30 01:19:13 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: c539b9b08b3366ee00c0ec1950f4df711552a269 Source-Link: https://github.com/googleapis/googleapis/commit/c539b9b08b3366ee00c0ec1950f4df711552a269 --- .../cloud/bigquery/storage/v1/arrow.proto | 19 +- .../cloud/bigquery/storage/v1/avro.proto | 3 +- .../cloud/bigquery/storage/v1/storage.proto | 22 +- .../cloud/bigquery/storage/v1/stream.proto | 28 +- .../bigquery-storage/protos/protos.d.ts | 123 +++++++ handwritten/bigquery-storage/protos/protos.js | 338 ++++++++++++++++++ .../bigquery-storage/protos/protos.json | 50 +++ handwritten/bigquery-storage/synth.metadata | 6 +- 8 files changed, 568 insertions(+), 21 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 1c54eeab07f..4b240f52139 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -43,3 +42,19 @@ message ArrowRecordBatch { // The count of rows in `serialized_record_batch`. int64 row_count = 2; } + +// Contains options specific to Arrow Serialization. +message ArrowSerializationOptions { + // Compression codec's supported by Arrow. + enum CompressionCodec { + // If unspecified no compression will be used. 
+ COMPRESSION_UNSPECIFIED = 0; + + // LZ4 Frame (https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md) + LZ4_FRAME = 1; + } + + // The compression codec to use for Arrow buffers in serialized record + // batches. + CompressionCodec buffer_compression = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index 9a064447b9f..dee4a6ed229 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 26fcd6ac25c..a5fa2b9eb86 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; @@ -70,7 +69,8 @@ service BigQueryRead { post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" body: "*" }; - option (google.api.method_signature) = "parent,read_session,max_stream_count"; + option (google.api.method_signature) = + "parent,read_session,max_stream_count"; } // Reads rows from the stream in the format prescribed by the ReadSession. @@ -99,7 +99,8 @@ service BigQueryRead { // original, primary, and residual, that original[0-j] = primary[0-j] and // original[j-n] = residual[0-m] once the streams have been read to // completion. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + rpc SplitReadStream(SplitReadStreamRequest) + returns (SplitReadStreamResponse) { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" }; @@ -201,6 +202,19 @@ message ReadRowsResponse { // Throttling state. If unset, the latest response still describes // the current throttling status. ThrottleState throttle_state = 5; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. This schema is equivelant to the one returned by + // CreateSession. This field is only populated in the first ReadRowsResponse + // RPC. + oneof schema { + // Output only. Avro schema. + AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Arrow schema. + ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + } } // Request message for `SplitReadStream`. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index febad03675e..28b2ac1bbf3 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -72,17 +71,27 @@ message ReadSession { // "nullable_field is not NULL" // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" // "numeric_field BETWEEN 1.0 AND 5.0" + // + // Restricted to a maximum length for 1 MB. string row_restriction = 2; + + // Optional. Options specific to the Apache Arrow output format. + oneof output_format_serialization_options { + ArrowSerializationOptions arrow_serialization_options = 3 + [(google.api.field_behavior) = OPTIONAL]; + } } // Output only. Unique identifier for the session, in the form // `projects/{project_id}/locations/{location}/sessions/{session_id}`. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Time at which the session becomes invalid. After this time, subsequent - // requests to read this Session will return errors. The expire_time is - // automatically assigned and currently cannot be specified or updated. - google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Time at which the session becomes invalid. After this time, + // subsequent requests to read this Session will return errors. 
The + // expire_time is automatically assigned and currently cannot be specified or + // updated. + google.protobuf.Timestamp expire_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Immutable. Data format of the output data. DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; @@ -102,12 +111,11 @@ message ReadSession { // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` string table = 6 [ (google.api.field_behavior) = IMMUTABLE, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } ]; - // Optional. Any modifiers which are applied when reading from the specified table. + // Optional. Any modifiers which are applied when reading from the specified + // table. TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. Read options for this session (e.g. column selection, filters). diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index e2bf17d7e22..619dc695897 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -215,6 +215,105 @@ export namespace google { public toJSON(): { [k: string]: any }; } + /** Properties of an ArrowSerializationOptions. */ + interface IArrowSerializationOptions { + + /** ArrowSerializationOptions bufferCompression */ + bufferCompression?: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null); + } + + /** Represents an ArrowSerializationOptions. */ + class ArrowSerializationOptions implements IArrowSerializationOptions { + + /** + * Constructs a new ArrowSerializationOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions); + + /** ArrowSerializationOptions bufferCompression. */ + public bufferCompression: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec); + + /** + * Creates a new ArrowSerializationOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSerializationOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @param message ArrowSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @param message ArrowSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Verifies an ArrowSerializationOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowSerializationOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. + * @param message ArrowSerializationOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSerializationOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace ArrowSerializationOptions { + + /** CompressionCodec enum. */ + enum CompressionCodec { + COMPRESSION_UNSPECIFIED = 0, + LZ4_FRAME = 1 + } + } + /** Properties of an AvroSchema. */ interface IAvroSchema { @@ -982,6 +1081,12 @@ export namespace google { /** ReadRowsResponse throttleState */ throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + + /** ReadRowsResponse avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); } /** Represents a ReadRowsResponse. */ @@ -1008,9 +1113,18 @@ export namespace google { /** ReadRowsResponse throttleState. */ public throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + /** ReadRowsResponse avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + /** ReadRowsResponse rows. */ public rows?: ("avroRows"|"arrowRecordBatch"); + /** ReadRowsResponse schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + /** * Creates a new ReadRowsResponse instance using the specified properties. * @param [properties] Properties to set @@ -1522,6 +1636,9 @@ export namespace google { /** TableReadOptions rowRestriction */ rowRestriction?: (string|null); + + /** TableReadOptions arrowSerializationOptions */ + arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); } /** Represents a TableReadOptions. */ @@ -1539,6 +1656,12 @@ export namespace google { /** TableReadOptions rowRestriction. */ public rowRestriction: string; + /** TableReadOptions arrowSerializationOptions. 
*/ + public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + + /** TableReadOptions outputFormatSerializationOptions. */ + public outputFormatSerializationOptions?: "arrowSerializationOptions"; + /** * Creates a new TableReadOptions instance using the specified properties. * @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 70c91d583af..6f315f5ffad 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -504,6 +504,220 @@ return ArrowRecordBatch; })(); + v1.ArrowSerializationOptions = (function() { + + /** + * Properties of an ArrowSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowSerializationOptions + * @property {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null} [bufferCompression] ArrowSerializationOptions bufferCompression + */ + + /** + * Constructs a new ArrowSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowSerializationOptions. + * @implements IArrowSerializationOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set + */ + function ArrowSerializationOptions(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSerializationOptions bufferCompression. + * @member {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec} bufferCompression + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @instance + */ + ArrowSerializationOptions.prototype.bufferCompression = 0; + + /** + * Creates a new ArrowSerializationOptions instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions instance + */ + ArrowSerializationOptions.create = function create(properties) { + return new ArrowSerializationOptions(properties); + }; + + /** + * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSerializationOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.bufferCompression != null && Object.hasOwnProperty.call(message, "bufferCompression")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.bufferCompression); + return writer; + }; + + /** + * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSerializationOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.bufferCompression = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSerializationOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSerializationOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSerializationOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) + switch (message.bufferCompression) { + default: + return "bufferCompression: enum value expected"; + case 0: + case 1: + break; + } + return null; + }; + + /** + * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + */ + ArrowSerializationOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); + switch (object.bufferCompression) { + case "COMPRESSION_UNSPECIFIED": + case 0: + message.bufferCompression = 0; + break; + case "LZ4_FRAME": + case 1: + message.bufferCompression = 1; + break; + } + return message; + }; + + /** + * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} message ArrowSerializationOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSerializationOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.bufferCompression = options.enums === String ? "COMPRESSION_UNSPECIFIED" : 0; + if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) + object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; + return object; + }; + + /** + * Converts this ArrowSerializationOptions to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @instance + * @returns {Object.} JSON object + */ + ArrowSerializationOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * CompressionCodec enum. + * @name google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec + * @enum {number} + * @property {number} COMPRESSION_UNSPECIFIED=0 COMPRESSION_UNSPECIFIED value + * @property {number} LZ4_FRAME=1 LZ4_FRAME value + */ + ArrowSerializationOptions.CompressionCodec = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "COMPRESSION_UNSPECIFIED"] = 0; + values[valuesById[1] = "LZ4_FRAME"] = 1; + return values; + })(); + + return ArrowSerializationOptions; + })(); + v1.AvroSchema = (function() { /** @@ -2119,6 +2333,8 @@ * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount * @property {google.cloud.bigquery.storage.v1.IStreamStats|null} [stats] ReadRowsResponse stats * @property {google.cloud.bigquery.storage.v1.IThrottleState|null} [throttleState] ReadRowsResponse throttleState + * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema */ /** @@ -2176,6 +2392,22 @@ */ ReadRowsResponse.prototype.throttleState = null; + /** + * ReadRowsResponse avroSchema. + * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroSchema = null; + + /** + * ReadRowsResponse arrowSchema. 
+ * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowSchema = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -2190,6 +2422,17 @@ set: $util.oneOfSetter($oneOfFields) }); + /** + * ReadRowsResponse schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new ReadRowsResponse instance using the specified properties. * @function create @@ -2224,6 +2467,10 @@ $root.google.cloud.bigquery.storage.v1.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); return writer; }; @@ -2273,6 +2520,12 @@ case 5: message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); break; + case 7: + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + case 8: + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + 
break; default: reader.skipType(tag & 7); break; @@ -2340,6 +2593,24 @@ if (error) return "throttleState." + error; } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } return null; }; @@ -2384,6 +2655,16 @@ throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.throttleState: object expected"); message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.fromObject(object.throttleState); } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); + } return message; }; @@ -2428,6 +2709,16 @@ object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; else object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? 
new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } return object; }; @@ -3544,6 +3835,7 @@ * @interface ITableReadOptions * @property {Array.|null} [selectedFields] TableReadOptions selectedFields * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions */ /** @@ -3578,6 +3870,28 @@ */ TableReadOptions.prototype.rowRestriction = ""; + /** + * TableReadOptions arrowSerializationOptions. + * @member {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null|undefined} arrowSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.arrowSerializationOptions = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * TableReadOptions outputFormatSerializationOptions. + * @member {"arrowSerializationOptions"|undefined} outputFormatSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { + get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new TableReadOptions instance using the specified properties. 
* @function create @@ -3607,6 +3921,8 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) + $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -3649,6 +3965,9 @@ case 2: message.rowRestriction = reader.string(); break; + case 3: + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); + break; default: reader.skipType(tag & 7); break; @@ -3684,6 +4003,7 @@ TableReadOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + var properties = {}; if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { if (!Array.isArray(message.selectedFields)) return "selectedFields: array expected"; @@ -3694,6 +4014,14 @@ if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) if (!$util.isString(message.rowRestriction)) return "rowRestriction: string expected"; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + properties.outputFormatSerializationOptions = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify(message.arrowSerializationOptions); + if (error) + return "arrowSerializationOptions." 
+ error; + } + } return null; }; @@ -3718,6 +4046,11 @@ } if (object.rowRestriction != null) message.rowRestriction = String(object.rowRestriction); + if (object.arrowSerializationOptions != null) { + if (typeof object.arrowSerializationOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); + } return message; }; @@ -3745,6 +4078,11 @@ } if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) object.rowRestriction = message.rowRestriction; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + object.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options); + if (options.oneofs) + object.outputFormatSerializationOptions = "arrowSerializationOptions"; + } return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index e959d1cef26..5ed51835500 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -40,6 +40,22 @@ } } }, + "ArrowSerializationOptions": { + "fields": { + "bufferCompression": { + "type": "CompressionCodec", + "id": 2 + } + }, + "nested": { + "CompressionCodec": { + "values": { + "COMPRESSION_UNSPECIFIED": 0, + "LZ4_FRAME": 1 + } + } + } + }, "AvroSchema": { "fields": { "schema": { @@ -197,6 +213,12 @@ "avroRows", "arrowRecordBatch" ] + }, + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] } }, "fields": { @@ -219,6 +241,20 @@ "throttleState": { "type": "ThrottleState", "id": 5 + }, + "avroSchema": { + "type": "AvroSchema", + "id": 7, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + 
"arrowSchema": { + "type": "ArrowSchema", + "id": 8, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } } }, @@ -347,6 +383,13 @@ } }, "TableReadOptions": { + "oneofs": { + "outputFormatSerializationOptions": { + "oneof": [ + "arrowSerializationOptions" + ] + } + }, "fields": { "selectedFields": { "rule": "repeated", @@ -356,6 +399,13 @@ "rowRestriction": { "type": "string", "id": 2 + }, + "arrowSerializationOptions": { + "type": "ArrowSerializationOptions", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } } diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata index b4392c2cc14..d7fca4f6a6b 100644 --- a/handwritten/bigquery-storage/synth.metadata +++ b/handwritten/bigquery-storage/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "2790786fb23c579b7a1ca7a3bdf91f375786f16e" + "sha": "abe5106d1faba875bc419693bc9ede1a0912f1e8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5477122b3e8037a1dc5bc920536158edbd151dc4", - "internalRef": "361273630" + "sha": "c539b9b08b3366ee00c0ec1950f4df711552a269", + "internalRef": "365759522" } }, { From 38f3f8fc84f0f574db186c7d0e1d6435168ddab5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Apr 2021 23:10:24 +0200 Subject: [PATCH 103/333] chore(deps): update dependency @types/sinon to v10 (#150) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [@types/sinon](https://togithub.com/DefinitelyTyped/DefinitelyTyped) | [`^9.0.0` -> `^10.0.0`](https://renovatebot.com/diffs/npm/@types%2fsinon/9.0.11/10.0.0) | 
[![age](https://badges.renovateapi.com/packages/npm/@types%2fsinon/10.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/@types%2fsinon/10.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/@types%2fsinon/10.0.0/compatibility-slim/9.0.11)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/@types%2fsinon/10.0.0/confidence-slim/9.0.11)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). 
--- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 9ae410a2959..6e8c8b0245d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -34,7 +34,7 @@ "devDependencies": { "@types/mocha": "^8.0.0", "@types/node": "^13.7.1", - "@types/sinon": "^9.0.0", + "@types/sinon": "^10.0.0", "c8": "^7.1.0", "gts": "^2.0.0", "jsdoc": "^3.6.3", From 32cf6b9bb1d65409652662209efa0fe36b4a584f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Apr 2021 00:56:43 +0200 Subject: [PATCH 104/333] chore(deps): update dependency ts-loader to v9 (#153) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [ts-loader](https://togithub.com/TypeStrong/ts-loader) | [`^8.0.0` -> `^9.0.0`](https://renovatebot.com/diffs/npm/ts-loader/8.1.0/9.0.0) | [![age](https://badges.renovateapi.com/packages/npm/ts-loader/9.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/ts-loader/9.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/ts-loader/9.0.0/compatibility-slim/8.1.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/ts-loader/9.0.0/confidence-slim/8.1.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
TypeStrong/ts-loader ### [`v9.0.0`](https://togithub.com/TypeStrong/ts-loader/blob/master/CHANGELOG.md#v900) [Compare Source](https://togithub.com/TypeStrong/ts-loader/compare/v8.1.0...v9.0.0) Breaking changes: - minimum webpack version: 5 - minimum node version: 12 Changes: - [webpack 5 migration](https://togithub.com/TypeStrong/ts-loader/pull/1251) - thanks [@​johnnyreilly](https://togithub.com/johnnyreilly), [@​jonwallsten](https://togithub.com/jonwallsten), [@​sokra](https://togithub.com/sokra), [@​appzuka](https://togithub.com/appzuka), [@​alexander-akait](https://togithub.com/alexander-akait)
--- ### Configuration :date: **Schedule**: "after 9am and before 3pm" (UTC). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6e8c8b0245d..8d18b642d4c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -45,7 +45,7 @@ "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^10.0.0", - "ts-loader": "^8.0.0", + "ts-loader": "^9.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", "webpack-cli": "^4.0.0", From 8c5e224a9c460b39c9f6ed0e6a2c84f99ecbb68a Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Tue, 20 Apr 2021 10:35:37 -0700 Subject: [PATCH 105/333] chore: migrate to owl bot (#154) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: copy files from googleapis-gen e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a * chore: run the post processor * 🦉 Updates from OwlBot Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++ .../bigquery-storage/.github/.OwlBot.yaml | 30 ++++++++++++ .../.kokoro/release/publish.cfg | 40 ---------------- .../bigquery-storage/.repo-metadata.json | 17 +++---- .../bigquery-storage/{synth.py => owlbot.py} | 22 ++------- handwritten/bigquery-storage/synth.metadata | 46 ------------------- 6 
files changed, 47 insertions(+), 112 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/.OwlBot.lock.yaml create mode 100644 handwritten/bigquery-storage/.github/.OwlBot.yaml rename handwritten/bigquery-storage/{synth.py => owlbot.py} (51%) delete mode 100644 handwritten/bigquery-storage/synth.metadata diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml new file mode 100644 index 00000000000..278e98ced87 --- /dev/null +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:c3eae37a355402067b97cbeb6f5a7d2dd87aecfd064aeb2d2ea0bde40778cf68 + image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + diff --git a/handwritten/bigquery-storage/.github/.OwlBot.yaml b/handwritten/bigquery-storage/.github/.OwlBot.yaml new file mode 100644 index 00000000000..52a82ebed51 --- /dev/null +++ b/handwritten/bigquery-storage/.github/.OwlBot.yaml @@ -0,0 +1,30 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+docker: + image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + +deep-preserve-regex: + - /owl-bot-staging/v1alpha2 + - /owl-bot-staging/v1beta2 + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a + diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index bbfadda164d..6e84cf465c9 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -1,23 +1,3 @@ -# Get npm token from Keystore -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_npm_token" - backend_type: FASTCONFIGPUSH - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - before_action { fetch_keystore { keystore_resource { @@ -27,26 +7,6 @@ before_action { } } -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } -} - -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} - env_vars: { key: "SECRET_MANAGER_KEYS" value: "npm_publish_token,releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index 1c173eeacf9..aa908c00509 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -1,14 +1,15 @@ { - "name": "bigquerystorage", - 
"name_pretty": "Google BigQuery Storage", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", - "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", - "issue_tracker": "https://b.corp.google.com/savedsearches/559654", + "distribution_name": "@google-cloud/bigquery-storage", "release_level": "ga", - "language": "nodejs", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", "repo": "googleapis/nodejs-bigquery-storage", - "distribution_name": "@google-cloud/bigquery-storage", - "api_id": "bigquerystorage.googleapis.com", + "default_version": "v1", + "language": "nodejs", "requires_billing": true, + "issue_tracker": "https://b.corp.google.com/savedsearches/559654", + "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", + "name": "bigquerystorage", + "name_pretty": "Google BigQuery Storage", + "api_id": "bigquerystorage.googleapis.com", "codeowner_team": "@googleapis/api-bigquery" } diff --git a/handwritten/bigquery-storage/synth.py b/handwritten/bigquery-storage/owlbot.py similarity index 51% rename from handwritten/bigquery-storage/synth.py rename to handwritten/bigquery-storage/owlbot.py index 1cb8ff00d59..44b15e78f18 100644 --- a/handwritten/bigquery-storage/synth.py +++ b/handwritten/bigquery-storage/owlbot.py @@ -12,23 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""This script is used to synthesize generated parts of this library.""" -import synthtool as s -import synthtool.gcp as gcp import synthtool.languages.node as node -import logging -logging.basicConfig(level=logging.DEBUG) -AUTOSYNTH_MULTIPLE_COMMITS = True - -# Run the gapic generator -gapic = gcp.GAPICBazel() -name = 'bigquery-storage' -versions = ['v1beta1', 'v1'] -for version in versions: - library = gapic.node_library(name, version, proto_path=f'google/cloud/bigquery/storage/{version}') - s.copy(library, excludes=['package.json', 'README.md', 'src/index.ts']) -# Copy common templates -common_templates = gcp.CommonTemplates() -templates = common_templates.node_library(source_location='build/src') -s.copy(templates, excludes=[]) -node.postprocess_gapic_library() +node.owlbot_main( + staging_excludes=['package.json', 'README.md', 'src/index.ts'], + templates_excludes=['src/index.ts'] +) diff --git a/handwritten/bigquery-storage/synth.metadata b/handwritten/bigquery-storage/synth.metadata deleted file mode 100644 index d7fca4f6a6b..00000000000 --- a/handwritten/bigquery-storage/synth.metadata +++ /dev/null @@ -1,46 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/nodejs-bigquery-storage.git", - "sha": "abe5106d1faba875bc419693bc9ede1a0912f1e8" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c539b9b08b3366ee00c0ec1950f4df711552a269", - "internalRef": "365759522" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "57c23fa5705499a4181095ced81f0ee0933b64f6" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "bigquery-storage", - "apiVersion": "v1beta1", - "language": "nodejs", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "bigquery-storage", - "apiVersion": "v1", - "language": "nodejs", - "generator": 
"bazel" - } - } - ] -} \ No newline at end of file From 7fbf435ae00e9565c0a8607b72ee536412231ad7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 Apr 2021 23:28:40 +0000 Subject: [PATCH 106/333] chore: release 2.4.0 (#146) :robot: I have created a release \*beep\* \*boop\* --- ## [2.4.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.3.0...v2.4.0) (2021-04-20) ### Features * add a Arrow compression options (Only LZ4 for now). ([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) * Return schema on first ReadRowsResponse ([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 00c433bccd8..3e803182fad 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [2.4.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.3.0...v2.4.0) (2021-04-20) + + +### Features + +* add a Arrow compression options (Only LZ4 for now). 
([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) +* Return schema on first ReadRowsResponse ([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) + ## [2.3.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.3...v2.3.0) (2021-01-09) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 8d18b642d4c..7dba8e80b7d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.3.0", + "version": "2.4.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 0cd85b444fea3b045a6592c6e217f772f3f02687 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Thu, 6 May 2021 17:49:02 -0700 Subject: [PATCH 107/333] fix(deps): require google-gax v2.12.0 (#158) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7dba8e80b7d..9765cde94ea 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -29,7 +29,7 @@ "api-documenter": "api-documenter yaml --input-folder=temp" }, "dependencies": { - "google-gax": "^2.9.2" + "google-gax": "^2.12.0" }, "devDependencies": { "@types/mocha": "^8.0.0", From 70de962b4c37c5dd45d462478f30ebdf5dda5e67 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 May 2021 17:00:46 +0000 Subject: [PATCH 108/333] chore: new owl bot post processor docker image (#160) gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:f93bb861d6f12574437bb9aee426b71eafd63b419669ff0ed029f4b7e7162e3f --- 
.../.github/.OwlBot.lock.yaml | 5 +- .../.github/generated-files-bot.yml | 13 ++++ .../bigquery-storage/protos/protos.d.ts | 10 +-- handwritten/bigquery-storage/protos/protos.js | 20 +++--- .../src/v1/big_query_read_client.ts | 42 ++++++------- .../src/v1beta1/big_query_storage_client.ts | 62 +++++++++---------- .../test/gapic_big_query_read_v1.ts | 17 +++-- .../test/gapic_big_query_storage_v1beta1.ts | 32 ++++------ 8 files changed, 99 insertions(+), 102 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/generated-files-bot.yml diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 278e98ced87..a3a3420de72 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:c3eae37a355402067b97cbeb6f5a7d2dd87aecfd064aeb2d2ea0bde40778cf68 - image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - + image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + digest: sha256:f93bb861d6f12574437bb9aee426b71eafd63b419669ff0ed029f4b7e7162e3f diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml new file mode 100644 index 00000000000..1b3ef1c7837 --- /dev/null +++ b/handwritten/bigquery-storage/.github/generated-files-bot.yml @@ -0,0 +1,13 @@ +generatedFiles: +- path: '.kokoro/**' + message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/CODEOWNERS' + message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' +- path: '.github/workflows/**' + message: '`.github/workflows` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/generated-files-bot.+(yml|yaml)' + message: '`.github/generated-files-bot.(yml|yaml)` should 
be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: 'README.md' + message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' +- path: 'samples/README.md' + message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 619dc695897..4ab3a82b13b 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4197,19 +4197,19 @@ export namespace google { public selector: string; /** HttpRule get. */ - public get: string; + public get?: (string|null); /** HttpRule put. */ - public put: string; + public put?: (string|null); /** HttpRule post. */ - public post: string; + public post?: (string|null); /** HttpRule delete. */ - public delete: string; + public delete?: (string|null); /** HttpRule patch. */ - public patch: string; + public patch?: (string|null); /** HttpRule custom. */ public custom?: (google.api.ICustomHttpPattern|null); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 6f315f5ffad..a948d7dd4df 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -9799,43 +9799,43 @@ /** * HttpRule get. - * @member {string} get + * @member {string|null|undefined} get * @memberof google.api.HttpRule * @instance */ - HttpRule.prototype.get = ""; + HttpRule.prototype.get = null; /** * HttpRule put. 
- * @member {string} put + * @member {string|null|undefined} put * @memberof google.api.HttpRule * @instance */ - HttpRule.prototype.put = ""; + HttpRule.prototype.put = null; /** * HttpRule post. - * @member {string} post + * @member {string|null|undefined} post * @memberof google.api.HttpRule * @instance */ - HttpRule.prototype.post = ""; + HttpRule.prototype.post = null; /** * HttpRule delete. - * @member {string} delete + * @member {string|null|undefined} delete * @memberof google.api.HttpRule * @instance */ - HttpRule.prototype["delete"] = ""; + HttpRule.prototype["delete"] = null; /** * HttpRule patch. - * @member {string} patch + * @member {string|null|undefined} patch * @memberof google.api.HttpRule * @instance */ - HttpRule.prototype.patch = ""; + HttpRule.prototype.patch = null; /** * HttpRule custom. diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index a5d7db865c2..f25c074a4f8 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -230,13 +230,14 @@ export class BigQueryReadClient { ]; for (const methodName of bigQueryReadStubMethods) { const callPromise = this.bigQueryReadStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, + stub => + (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err: Error | null | undefined) => () => { throw err; } @@ -432,11 +433,10 @@ export class BigQueryReadClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = 
gax.routingHeader.fromParams({ - 'read_session.table': request.readSession!.table || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'read_session.table': request.readSession!.table || '', + }); this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); } @@ -549,11 +549,10 @@ export class BigQueryReadClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - name: request.name || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + name: request.name || '', + }); this.initialize(); return this.innerApiCalls.splitReadStream(request, options, callback); } @@ -595,11 +594,10 @@ export class BigQueryReadClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - read_stream: request.readStream || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + read_stream: request.readStream || '', + }); this.initialize(); return this.innerApiCalls.readRows(request, options); } diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index da6f81c04e4..4defcb909f4 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -233,13 +233,14 @@ export class BigQueryStorageClient { ]; for (const methodName of bigQueryStorageStubMethods) { const callPromise = this.bigQueryStorageStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return 
Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, + stub => + (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, (err: Error | null | undefined) => () => { throw err; } @@ -437,12 +438,11 @@ export class BigQueryStorageClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'table_reference.project_id': request.tableReference!.projectId || '', - 'table_reference.dataset_id': request.tableReference!.datasetId || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'table_reference.project_id': request.tableReference!.projectId || '', + 'table_reference.dataset_id': request.tableReference!.datasetId || '', + }); this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); } @@ -543,11 +543,10 @@ export class BigQueryStorageClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'session.name': request.session!.name || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'session.name': request.session!.name || '', + }); this.initialize(); return this.innerApiCalls.batchCreateReadSessionStreams( request, @@ -658,11 +657,10 @@ export class BigQueryStorageClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ 
- 'stream.name': request.stream!.name || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'stream.name': request.stream!.name || '', + }); this.initialize(); return this.innerApiCalls.finalizeStream(request, options, callback); } @@ -776,11 +774,10 @@ export class BigQueryStorageClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'original_stream.name': request.originalStream!.name || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'original_stream.name': request.originalStream!.name || '', + }); this.initialize(); return this.innerApiCalls.splitReadStream(request, options, callback); } @@ -822,11 +819,10 @@ export class BigQueryStorageClient { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'read_position.stream.name': request.readPosition!.stream!.name || '', - }); + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + 'read_position.stream.name': request.readPosition!.stream!.name || '', + }); this.initialize(); return this.innerApiCalls.readRows(request, options); } diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 6d69543ebaf..e7feeb4724d 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -28,10 +28,9 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof 
protobuf.Message).toObject( - instance as protobuf.Message, - {defaults: true} - ); + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( filledObject ) as T; @@ -209,9 +208,8 @@ describe('v1.BigQueryReadClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadSession() ); - client.innerApiCalls.createReadSession = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.createReadSession = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.createReadSession( request, @@ -322,9 +320,8 @@ describe('v1.BigQueryReadClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() ); - client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.splitReadStream = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.splitReadStream( request, diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 471cebc8ec7..b4821b15682 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -28,10 +28,9 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message).toObject( - instance as protobuf.Message, - {defaults: true} - ); + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof 
protobuf.Message).fromObject( filledObject ) as T; @@ -217,9 +216,8 @@ describe('v1beta1.BigQueryStorageClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() ); - client.innerApiCalls.createReadSession = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.createReadSession = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.createReadSession( request, @@ -303,9 +301,8 @@ describe('v1beta1.BigQueryStorageClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() ); - client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( - expectedResponse - ); + client.innerApiCalls.batchCreateReadSessionStreams = + stubSimpleCall(expectedResponse); const [response] = await client.batchCreateReadSessionStreams(request); assert.deepStrictEqual(response, expectedResponse); assert( @@ -337,9 +334,8 @@ describe('v1beta1.BigQueryStorageClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() ); - client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.batchCreateReadSessionStreams = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.batchCreateReadSessionStreams( request, @@ -455,9 +451,8 @@ describe('v1beta1.BigQueryStorageClient', () => { const expectedResponse = generateSampleMessage( new protos.google.protobuf.Empty() ); - client.innerApiCalls.finalizeStream = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.finalizeStream = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.finalizeStream( request, @@ -570,9 +565,8 @@ 
describe('v1beta1.BigQueryStorageClient', () => { const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() ); - client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback( - expectedResponse - ); + client.innerApiCalls.splitReadStream = + stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { client.splitReadStream( request, From eac594989424be05eecf4737c722a316555536b3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 May 2021 21:58:13 +0000 Subject: [PATCH 109/333] fix: use require() to load JSON protos (#161) The library is regenerated with gapic-generator-typescript v1.3.1. Committer: @alexander-fenster PiperOrigin-RevId: 372468161 Source-Link: https://github.com/googleapis/googleapis/commit/75880c3e6a6aa2597400582848e81bbbfac51dea Source-Link: https://github.com/googleapis/googleapis-gen/commit/77b18044813d4c8c415ff9ea68e76e307eb8e904 --- .../src/v1/big_query_read_client.ts | 18 ++---------------- .../src/v1beta1/big_query_storage_client.ts | 18 ++---------------- 2 files changed, 4 insertions(+), 32 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index f25c074a4f8..02d1095fa44 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -22,6 +22,7 @@ import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); /** * Client JSON configuration object, loaded from * `src/v1/big_query_read_client_config.json`. @@ -138,22 +139,7 @@ export class BigQueryReadClient { clientHeader.push(`${opts.libName}/${opts.libVersion}`); } // Load the applicable protos. 
- // For Node.js, pass the path to JSON proto file. - // For browsers, pass the JSON content. - - const nodejsProtoPath = path.join( - __dirname, - '..', - '..', - 'protos', - 'protos.json' - ); - this._protos = this._gaxGrpc.loadProto( - opts.fallback - ? // eslint-disable-next-line @typescript-eslint/no-var-requires - require('../../protos/protos.json') - : nodejsProtoPath - ); + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); // This API contains "path templates"; forward-slash-separated // identifiers to uniquely identify resources within the API. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 4defcb909f4..d779c6da55c 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -22,6 +22,7 @@ import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; import * as path from 'path'; import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); /** * Client JSON configuration object, loaded from * `src/v1beta1/big_query_storage_client_config.json`. @@ -138,22 +139,7 @@ export class BigQueryStorageClient { clientHeader.push(`${opts.libName}/${opts.libVersion}`); } // Load the applicable protos. - // For Node.js, pass the path to JSON proto file. - // For browsers, pass the JSON content. - - const nodejsProtoPath = path.join( - __dirname, - '..', - '..', - 'protos', - 'protos.json' - ); - this._protos = this._gaxGrpc.loadProto( - opts.fallback - ? // eslint-disable-next-line @typescript-eslint/no-var-requires - require('../../protos/protos.json') - : nodejsProtoPath - ); + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); // This API contains "path templates"; forward-slash-separated // identifiers to uniquely identify resources within the API. 
From 3eef6cd48789b780479b4f309ef2419927fc16fe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 May 2021 23:12:49 +0000 Subject: [PATCH 110/333] chore: update gapic-generator-typescript to v1.3.2 (#162) Committer: @alexander-fenster PiperOrigin-RevId: 372656503 Source-Link: https://github.com/googleapis/googleapis/commit/6fa858c6489b1bbc505a7d7afe39f2dc45819c38 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d7c95df3ab1ea1b4c22a4542bad4924cc46d1388 --- handwritten/bigquery-storage/src/v1/big_query_read_client.ts | 1 - .../bigquery-storage/src/v1beta1/big_query_storage_client.ts | 1 - 2 files changed, 2 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 02d1095fa44..2fb656b8d20 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -19,7 +19,6 @@ /* global window */ import * as gax from 'google-gax'; import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; -import * as path from 'path'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index d779c6da55c..6cc1ecb78e6 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -19,7 +19,6 @@ /* global window */ import * as gax from 'google-gax'; import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; -import * as path from 'path'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); From ca5eb6632f3508f926e4b37faa8307c8c6d44fb9 Mon Sep 17 00:00:00 2001 
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 12 May 2021 01:46:24 +0000 Subject: [PATCH 111/333] chore: new owl bot post processor docker image (#164) gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:b94b09fb24bf804090b8cc60ee726ca161d5e5915151e417f8ef5d8bcfe73cbc --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index a3a3420de72..e26fd96dec1 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:f93bb861d6f12574437bb9aee426b71eafd63b419669ff0ed029f4b7e7162e3f + digest: sha256:b94b09fb24bf804090b8cc60ee726ca161d5e5915151e417f8ef5d8bcfe73cbc From 83bb06955e84cd9d7179a25e15ff72760f20f60f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 May 2021 11:01:03 -0700 Subject: [PATCH 112/333] feat: Add ZSTD compression as an option for Arrow. (#165) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add ZSTD compression as an option for Arrow. 
Committer: @emkornfield PiperOrigin-RevId: 374220891 Source-Link: https://github.com/googleapis/googleapis/commit/23efea9fc7bedfe53b24295ed84b5f873606edcb Source-Link: https://github.com/googleapis/googleapis-gen/commit/79c15da3a71c276e23aa2746f9fa243741763179 * 🦉 Updates from OwlBot Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/arrow.proto | 3 +++ .../cloud/bigquery/storage/v1/storage.proto | 6 ++--- .../cloud/bigquery/storage/v1/stream.proto | 22 +++++++++---------- .../bigquery-storage/protos/protos.d.ts | 3 ++- handwritten/bigquery-storage/protos/protos.js | 7 ++++++ .../bigquery-storage/protos/protos.json | 3 ++- 6 files changed, 26 insertions(+), 18 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 4b240f52139..514b77e617c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -52,6 +52,9 @@ message ArrowSerializationOptions { // LZ4 Frame (https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md) LZ4_FRAME = 1; + + // Zstandard compression. 
+ ZSTD = 2; } // The compression codec to use for Arrow buffers in serialized record diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index a5fa2b9eb86..8ac56375044 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -69,8 +69,7 @@ service BigQueryRead { post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" body: "*" }; - option (google.api.method_signature) = - "parent,read_session,max_stream_count"; + option (google.api.method_signature) = "parent,read_session,max_stream_count"; } // Reads rows from the stream in the format prescribed by the ReadSession. @@ -99,8 +98,7 @@ service BigQueryRead { // original, primary, and residual, that original[0-j] = primary[0-j] and // original[j-n] = residual[0-m] once the streams have been read to // completion. - rpc SplitReadStream(SplitReadStreamRequest) - returns (SplitReadStreamResponse) { + rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" }; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 28b2ac1bbf3..291cf4115bc 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -75,10 +75,9 @@ message ReadSession { // Restricted to a maximum length for 1 MB. string row_restriction = 2; - // Optional. Options specific to the Apache Arrow output format. 
oneof output_format_serialization_options { - ArrowSerializationOptions arrow_serialization_options = 3 - [(google.api.field_behavior) = OPTIONAL]; + // Optional. Options specific to the Apache Arrow output format. + ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL]; } } @@ -86,12 +85,10 @@ message ReadSession { // `projects/{project_id}/locations/{location}/sessions/{session_id}`. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Time at which the session becomes invalid. After this time, - // subsequent requests to read this Session will return errors. The - // expire_time is automatically assigned and currently cannot be specified or - // updated. - google.protobuf.Timestamp expire_time = 2 - [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. The expire_time is + // automatically assigned and currently cannot be specified or updated. + google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Immutable. Data format of the output data. DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; @@ -111,11 +108,12 @@ message ReadSession { // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` string table = 6 [ (google.api.field_behavior) = IMMUTABLE, - (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } ]; - // Optional. Any modifiers which are applied when reading from the specified - // table. + // Optional. Any modifiers which are applied when reading from the specified table. TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. Read options for this session (e.g. column selection, filters). 
diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 4ab3a82b13b..fea18b2d33b 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -310,7 +310,8 @@ export namespace google { /** CompressionCodec enum. */ enum CompressionCodec { COMPRESSION_UNSPECIFIED = 0, - LZ4_FRAME = 1 + LZ4_FRAME = 1, + ZSTD = 2 } } diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index a948d7dd4df..4ead87bfb66 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -640,6 +640,7 @@ return "bufferCompression: enum value expected"; case 0: case 1: + case 2: break; } return null; @@ -666,6 +667,10 @@ case 1: message.bufferCompression = 1; break; + case "ZSTD": + case 2: + message.bufferCompression = 2; + break; } return message; }; @@ -707,11 +712,13 @@ * @enum {number} * @property {number} COMPRESSION_UNSPECIFIED=0 COMPRESSION_UNSPECIFIED value * @property {number} LZ4_FRAME=1 LZ4_FRAME value + * @property {number} ZSTD=2 ZSTD value */ ArrowSerializationOptions.CompressionCodec = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[0] = "COMPRESSION_UNSPECIFIED"] = 0; values[valuesById[1] = "LZ4_FRAME"] = 1; + values[valuesById[2] = "ZSTD"] = 2; return values; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 5ed51835500..6a76834bc84 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -51,7 +51,8 @@ "CompressionCodec": { "values": { "COMPRESSION_UNSPECIFIED": 0, - "LZ4_FRAME": 1 + "LZ4_FRAME": 1, + "ZSTD": 2 } } } From 3296d7e1d8d701906cf18b886c78e31327987b23 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 May 2021 
20:32:21 +0000 Subject: [PATCH 113/333] chore: new owl bot post processor docker image (#167) gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:f4734af778c3d0eb58a6db0078907a87f2e53f3c7a6422363fc37ee52e02b25a --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/generated-files-bot.yml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index e26fd96dec1..043a606639e 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:b94b09fb24bf804090b8cc60ee726ca161d5e5915151e417f8ef5d8bcfe73cbc + digest: sha256:f4734af778c3d0eb58a6db0078907a87f2e53f3c7a6422363fc37ee52e02b25a diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml index 1b3ef1c7837..6b04910c0fb 100644 --- a/handwritten/bigquery-storage/.github/generated-files-bot.yml +++ b/handwritten/bigquery-storage/.github/generated-files-bot.yml @@ -11,3 +11,6 @@ generatedFiles: message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' - path: 'samples/README.md' message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' +ignoreAuthors: +- 'gcf-owl-bot[bot]' +- 'yoshi-automation' From 06804e3c4f3d9f7a51aa7f343695d24a5cee19e8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 21 May 2021 19:06:19 +0200 Subject: [PATCH 114/333] chore(deps): update dependency @types/node to v14 (#169) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [@types/node](https://togithub.com/DefinitelyTyped/DefinitelyTyped) | [`^13.7.1` -> `^14.0.0`](https://renovatebot.com/diffs/npm/@types%2fnode/13.13.52/14.17.0) | [![age](https://badges.renovateapi.com/packages/npm/@types%2fnode/14.17.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/@types%2fnode/14.17.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/@types%2fnode/14.17.0/compatibility-slim/13.13.52)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/@types%2fnode/14.17.0/confidence-slim/13.13.52)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: "after 9am and before 3pm" (UTC). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). 
View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 9765cde94ea..9b9f6280e25 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -33,7 +33,7 @@ }, "devDependencies": { "@types/mocha": "^8.0.0", - "@types/node": "^13.7.1", + "@types/node": "^14.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", "gts": "^2.0.0", From 0263c5b5a5113e6741c1900ea3cef3014cc9666c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 25 May 2021 17:58:23 +0200 Subject: [PATCH 115/333] chore(deps): update dependency sinon to v11 (#170) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [sinon](https://sinonjs.org/) ([source](https://togithub.com/sinonjs/sinon)) | [`^10.0.0` -> `^11.0.0`](https://renovatebot.com/diffs/npm/sinon/10.0.0/11.1.0) | [![age](https://badges.renovateapi.com/packages/npm/sinon/11.1.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/sinon/11.1.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/sinon/11.1.0/compatibility-slim/10.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/sinon/11.1.0/confidence-slim/10.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
sinonjs/sinon ### [`v11.1.0`](https://togithub.com/sinonjs/sinon/blob/master/CHANGELOG.md#​1110--2021-05-25) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v11.0.0...31be9a5d5a4762ef01cb195f29024616dfee9ce8) \================== - Add sinon.promise() implementation ([#​2369](https://togithub.com/sinonjs/sinon/issues/2369)) - Set wrappedMethod on getters/setters ([#​2378](https://togithub.com/sinonjs/sinon/issues/2378)) - \[Docs] Update fake-server usage & descriptions ([#​2365](https://togithub.com/sinonjs/sinon/issues/2365)) - Fake docs improvement ([#​2360](https://togithub.com/sinonjs/sinon/issues/2360)) - Update nise to 5.1.0 (fixed [#​2318](https://togithub.com/sinonjs/sinon/issues/2318)) ### [`v11.0.0`](https://togithub.com/sinonjs/sinon/blob/master/CHANGELOG.md#​1100--2021-05-24) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v10.0.1...v11.0.0) \================== - Explicitly use samsam 6.0.2 with fix for [#​2345](https://togithub.com/sinonjs/sinon/issues/2345) - Update most packages ([#​2371](https://togithub.com/sinonjs/sinon/issues/2371)) - Update compatibility docs ([#​2366](https://togithub.com/sinonjs/sinon/issues/2366)) - Update packages (includes breaking fake-timers change, see [#​2352](https://togithub.com/sinonjs/sinon/issues/2352)) - Warn of potential memory leaks ([#​2357](https://togithub.com/sinonjs/sinon/issues/2357)) - Fix clock test errors ### [`v10.0.1`](https://togithub.com/sinonjs/sinon/blob/master/CHANGELOG.md#​1001--2021-04-08) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v10.0.0...v10.0.1) \================== - Upgrade sinon components (bumps y18n to 4.0.1) - Bump y18n from 4.0.0 to 4.0.1
--- ### Configuration 📅 **Schedule**: "after 9am and before 3pm" (UTC). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 9b9f6280e25..36bbc54cf92 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -44,7 +44,7 @@ "mocha": "^8.0.0", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^10.0.0", + "sinon": "^11.0.0", "ts-loader": "^9.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", From 4fa6a005ee8f2e856d1745d9a5e3a23229806fae Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 May 2021 20:48:56 +0000 Subject: [PATCH 116/333] fix: GoogleAdsError missing using generator version after 1.3.0 (#171) [PR](https://github.com/googleapis/gapic-generator-typescript/pull/878) within updated gapic-generator-typescript version 1.4.0 Committer: @summer-ji-eng PiperOrigin-RevId: 375759421 Source-Link: https://github.com/googleapis/googleapis/commit/95fa72fdd0d69b02d72c33b37d1e4cc66d4b1446 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f40a34377ad488a7c2bc3992b3c8d5faf5a15c46 --- handwritten/bigquery-storage/src/v1/big_query_read_client.ts | 2 ++ .../bigquery-storage/src/v1beta1/big_query_storage_client.ts | 2 ++ 2 files changed, 4 insertions(+) diff --git 
a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 2fb656b8d20..ceeb38bd118 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -133,6 +133,8 @@ export class BigQueryReadClient { } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { clientHeader.push(`${opts.libName}/${opts.libVersion}`); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 6cc1ecb78e6..125c0224618 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -133,6 +133,8 @@ export class BigQueryStorageClient { } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { clientHeader.push(`${opts.libName}/${opts.libVersion}`); From c2fdd2610a77a14bf2270edc6aff20ae04038e7d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 7 Jun 2021 19:04:27 +0000 Subject: [PATCH 117/333] chore: Report warning on `.github/workflows/ci.yaml` (#173) * fix: Report warning on `.github/workflows/ci.yaml` Not all files in `.github/workflows` are managed, only `ci.yaml`. Related false-positive: https://github.com/googleapis/repo-automation-bots/pull/1952#issuecomment-856142886 * fix: Report warning on `.github/workflows/ci.yaml` Not all files in `.github/workflows` are managed, only `ci.yaml`. 
Source-Link: https://github.com/googleapis/synthtool/commit/2430f8d90ed8a508e8422a3a7191e656d5a6bf53 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:14aaee566d6fc07716bb92da416195156e47a4777e7d1cd2bb3e28c46fe30fe2 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/generated-files-bot.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 043a606639e..3a93af921f1 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:f4734af778c3d0eb58a6db0078907a87f2e53f3c7a6422363fc37ee52e02b25a + digest: sha256:14aaee566d6fc07716bb92da416195156e47a4777e7d1cd2bb3e28c46fe30fe2 diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml index 6b04910c0fb..7bb7ce54c58 100644 --- a/handwritten/bigquery-storage/.github/generated-files-bot.yml +++ b/handwritten/bigquery-storage/.github/generated-files-bot.yml @@ -3,8 +3,8 @@ generatedFiles: message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' - path: '.github/CODEOWNERS' message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' -- path: '.github/workflows/**' - message: '`.github/workflows` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/workflows/ci.yaml' + message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' - path: '.github/generated-files-bot.+(yml|yaml)' message: '`.github/generated-files-bot.(yml|yaml)` should be updated in 
[`synthtool`](https://github.com/googleapis/synthtool)' - path: 'README.md' From 16c92a7f49e74bbec221f283fe15c57d26597d1f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Jun 2021 13:24:49 -0700 Subject: [PATCH 118/333] chore: release 2.5.0 (#166) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 14 ++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 3e803182fad..81243e9ac06 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [2.5.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.4.0...v2.5.0) (2021-06-07) + + +### Features + +* Add ZSTD compression as an option for Arrow. 
([#165](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/165)) ([dc5a1d0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/dc5a1d00f92f09dafbf0d3b1a9bf5ea4b5c43103)) + + +### Bug Fixes + +* **deps:** require google-gax v2.12.0 ([#158](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/158)) ([3347edd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/3347edd3781d7a37ae6a50b0d6885365bc2e4b2f)) +* GoogleAdsError missing using generator version after 1.3.0 ([#171](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/171)) ([8504761](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/850476101d06f4c3f903fb10ebb6709c1a6ffa95)) +* use require() to load JSON protos ([#161](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/161)) ([a16129f](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/a16129f7a56882e3070fa79f29b8b6018e7cd651)) + ## [2.4.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.3.0...v2.4.0) (2021-04-20) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 36bbc54cf92..f0bff04eb1f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.4.0", + "version": "2.5.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 8032eda70bb8be0cbd64fdec7bffe226aaf2c9c8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 9 Jun 2021 19:24:51 +0000 Subject: [PATCH 119/333] chore(nodejs): use cloud-rad publication process (#1112) (#175) VERSION is used in @google-cloud/cloud-rad to publish ref docs for a particular version. Pass VERSION in via Stubby or Fusion. 
Source-Link: https://github.com/googleapis/synthtool/commit/740366bbb9a7e0f4b77fc75dc26be1d3a376c3e0 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:bbdd52de226c00df3356cdf25460397b429ab49552becca645adbc412f6a4ed5 --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/release/docs-devsite.cfg | 2 +- .../.kokoro/release/docs-devsite.sh | 48 ++----------------- handwritten/bigquery-storage/.trampolinerc | 3 +- 4 files changed, 7 insertions(+), 48 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 3a93af921f1..f6467c3a493 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:14aaee566d6fc07716bb92da416195156e47a4777e7d1cd2bb3e28c46fe30fe2 + digest: sha256:bbdd52de226c00df3356cdf25460397b429ab49552becca645adbc412f6a4ed5 diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg index f8aaa1f5eb5..8bcc62cc814 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" } # Download trampoline resources. 
diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 7657be3377a..2198e67fe92 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2019 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,6 @@ set -eo pipefail -# build jsdocs (Python is installed on the Node 10 docker image). if [[ -z "$CREDENTIALS" ]]; then # if CREDENTIALS are explicitly set, assume we're testing locally # and don't set NPM_CONFIG_PREFIX. @@ -25,47 +24,6 @@ if [[ -z "$CREDENTIALS" ]]; then cd $(dirname $0)/../.. fi -mkdir ./etc - npm install -npm run api-extractor -npm run api-documenter - -npm i json@9.0.6 -g -NAME=$(cat .repo-metadata.json | json name) - -mkdir ./_devsite -cp ./yaml/$NAME/* ./_devsite - -# Clean up TOC -# Delete SharePoint item, see https://github.com/microsoft/rushstack/issues/1229 -sed -i -e '1,3d' ./yaml/toc.yml -sed -i -e 's/^ //' ./yaml/toc.yml -# Delete interfaces from TOC (name and uid) -sed -i -e '/name: I[A-Z]/{N;d;}' ./yaml/toc.yml -sed -i -e '/^ *\@google-cloud.*:interface/d' ./yaml/toc.yml - -cp ./yaml/toc.yml ./_devsite/toc.yml - -# create docs.metadata, based on package.json and .repo-metadata.json. 
-pip install -U pip -python3 -m pip install --user gcp-docuploader -python3 -m docuploader create-metadata \ - --name=$NAME \ - --version=$(cat package.json | json version) \ - --language=$(cat .repo-metadata.json | json language) \ - --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ - --product-page=$(cat .repo-metadata.json | json product_documentation) \ - --github-repository=$(cat .repo-metadata.json | json repo) \ - --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) -cp docs.metadata ./_devsite/docs.metadata - -# deploy the docs. -if [[ -z "$CREDENTIALS" ]]; then - CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account -fi -if [[ -z "$BUCKET" ]]; then - BUCKET=docs-staging-v2 -fi - -python3 -m docuploader upload ./_devsite --destination-prefix docfx --credentials $CREDENTIALS --staging-bucket $BUCKET +npm install --no-save @google-cloud/cloud-rad@^0.2.5 +npx @google-cloud/cloud-rad \ No newline at end of file diff --git a/handwritten/bigquery-storage/.trampolinerc b/handwritten/bigquery-storage/.trampolinerc index 164613b9e6a..d4fcb894483 100644 --- a/handwritten/bigquery-storage/.trampolinerc +++ b/handwritten/bigquery-storage/.trampolinerc @@ -20,7 +20,8 @@ required_envvars+=( # Add env vars which are passed down into the container here. pass_down_envvars+=( - "AUTORELEASE_PR" + "AUTORELEASE_PR", + "VERSION" ) # Prevent unintentional override on the default image. From a484c79b6ffaf65ac79fe1943333f6bcc18e1443 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Jun 2021 06:02:42 +0000 Subject: [PATCH 120/333] build: add auto-approve to Node libraries (#1100) (#176) * build: add auto-approve to Node libraries Co-authored-by: Benjamin E. 
Coe Source-Link: https://github.com/googleapis/synthtool/commit/5cae043787729a908ed0cab28ca27baf9acee3c4 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:65aa68f2242c172345d7c1e780bced839bfdc344955d6aa460aa63b4481d93e5 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/CODEOWNERS | 3 +++ handwritten/bigquery-storage/.github/auto-approve.yml | 7 +++++++ 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 handwritten/bigquery-storage/.github/auto-approve.yml diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index f6467c3a493..1b520297430 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:bbdd52de226c00df3356cdf25460397b429ab49552becca645adbc412f6a4ed5 + digest: sha256:65aa68f2242c172345d7c1e780bced839bfdc344955d6aa460aa63b4481d93e5 diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS index 892293aecca..6d82f1f7467 100644 --- a/handwritten/bigquery-storage/.github/CODEOWNERS +++ b/handwritten/bigquery-storage/.github/CODEOWNERS @@ -7,3 +7,6 @@ # The yoshi-nodejs team is the default owner for nodejs repositories. * @googleapis/yoshi-nodejs @googleapis/api-bigquery + +# The github automation team is the default owner for the auto-approve file. 
+.github/auto-approve.yml @googleapis/github-automation diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml new file mode 100644 index 00000000000..903697974b0 --- /dev/null +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -0,0 +1,7 @@ +rules: +- author: "release-please[bot]" + title: "^chore: release" + changedFiles: + - "package\\.json$" + - "CHANGELOG\\.md$" + maxFiles: 3 \ No newline at end of file From 84562b23133d7786e1de6c9dfeaa463168bb5834 Mon Sep 17 00:00:00 2001 From: "F. Hinkelmann" Date: Thu, 10 Jun 2021 23:02:22 +0200 Subject: [PATCH 121/333] chore(nodejs): remove api-extractor dependencies (#177) --- handwritten/bigquery-storage/package.json | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index f0bff04eb1f..c578fd7a92e 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -24,9 +24,7 @@ "system-test": "c8 mocha build/system-test", "test": "c8 mocha build/test", "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", - "precompile": "gts clean", - "api-extractor": "api-extractor run --local", - "api-documenter": "api-documenter yaml --input-folder=temp" + "precompile": "gts clean" }, "dependencies": { "google-gax": "^2.12.0" @@ -48,9 +46,7 @@ "ts-loader": "^9.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", - "webpack-cli": "^4.0.0", - "@microsoft/api-documenter": "^7.8.10", - "@microsoft/api-extractor": "^7.8.10" + "webpack-cli": "^4.0.0" }, "engines": { "node": ">=10" From f0c51441f022107577bb476a20ddb05412c33815 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Jun 2021 19:10:32 +0000 Subject: [PATCH 122/333] build: remove errant comma (#1113) (#178) Source-Link: 
https://github.com/googleapis/synthtool/commit/41ccd8cd13ec31f4fb839cf8182aea3c7156e19d Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:c9c7828c165b1985579098978877935ee52dda2b1b538087514fd24fa2443e7a --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.trampolinerc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 1b520297430..e7c45fd36bc 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:65aa68f2242c172345d7c1e780bced839bfdc344955d6aa460aa63b4481d93e5 + digest: sha256:c9c7828c165b1985579098978877935ee52dda2b1b538087514fd24fa2443e7a diff --git a/handwritten/bigquery-storage/.trampolinerc b/handwritten/bigquery-storage/.trampolinerc index d4fcb894483..5fc2253137d 100644 --- a/handwritten/bigquery-storage/.trampolinerc +++ b/handwritten/bigquery-storage/.trampolinerc @@ -20,7 +20,7 @@ required_envvars+=( # Add env vars which are passed down into the container here. pass_down_envvars+=( - "AUTORELEASE_PR", + "AUTORELEASE_PR" "VERSION" ) From c18d8912a5f036ec36535601fe7d535d60d61264 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 22 Jun 2021 20:22:40 +0000 Subject: [PATCH 123/333] fix: make request optional in all cases (#179) ... 
chore: update gapic-generator-ruby to the latest commit chore: release gapic-generator-typescript 1.5.0 Committer: @miraleung PiperOrigin-RevId: 380641501 Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/27e4c88b4048e5f56508d4e1aa417d60a3380892 --- .../src/v1/big_query_read_client.ts | 8 ++++---- .../src/v1beta1/big_query_storage_client.ts | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index ceeb38bd118..6083fd59502 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -301,7 +301,7 @@ export class BigQueryReadClient { // -- Service calls -- // ------------------- createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, options?: CallOptions ): Promise< [ @@ -382,7 +382,7 @@ export class BigQueryReadClient { * const [response] = await client.createReadSession(request); */ createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, optionsOrCallback?: | CallOptions | Callback< @@ -428,7 +428,7 @@ export class BigQueryReadClient { return this.innerApiCalls.createReadSession(request, options, callback); } splitReadStream( - request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, options?: CallOptions ): Promise< [ @@ -498,7 +498,7 @@ export class BigQueryReadClient { * const [response] = await client.splitReadStream(request); */ splitReadStream( - request: 
protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, optionsOrCallback?: | CallOptions | Callback< diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 125c0224618..3168b16488b 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -304,7 +304,7 @@ export class BigQueryStorageClient { // -- Service calls -- // ------------------- createReadSession( - request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, options?: CallOptions ): Promise< [ @@ -387,7 +387,7 @@ export class BigQueryStorageClient { * const [response] = await client.createReadSession(request); */ createReadSession( - request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, optionsOrCallback?: | CallOptions | Callback< @@ -434,7 +434,7 @@ export class BigQueryStorageClient { return this.innerApiCalls.createReadSession(request, options, callback); } batchCreateReadSessionStreams( - request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, options?: CallOptions ): Promise< [ @@ -492,7 +492,7 @@ export class BigQueryStorageClient { * const [response] = await client.batchCreateReadSessionStreams(request); */ batchCreateReadSessionStreams( - request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, optionsOrCallback?: | CallOptions | Callback< 
@@ -542,7 +542,7 @@ export class BigQueryStorageClient { ); } finalizeStream( - request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, options?: CallOptions ): Promise< [ @@ -606,7 +606,7 @@ export class BigQueryStorageClient { * const [response] = await client.finalizeStream(request); */ finalizeStream( - request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, optionsOrCallback?: | CallOptions | Callback< @@ -652,7 +652,7 @@ export class BigQueryStorageClient { return this.innerApiCalls.finalizeStream(request, options, callback); } splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, options?: CallOptions ): Promise< [ @@ -723,7 +723,7 @@ export class BigQueryStorageClient { * const [response] = await client.splitReadStream(request); */ splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, optionsOrCallback?: | CallOptions | Callback< From 63737028d49aec057ef20d5dc4125df7dafdc9eb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Jun 2021 20:46:46 +0000 Subject: [PATCH 124/333] chore: release 2.5.1 (#180) :robot: I have created a release \*beep\* \*boop\* --- ### [2.5.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.0...v2.5.1) (2021-06-22) ### Bug Fixes * make request optional in all cases ([#179](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/179)) ([b0beaaa](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b0beaaa280e7599f75e0a439f4ecd4a9a6c059ad)) --- This PR was generated with 
[Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 81243e9ac06..41d98417af1 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.5.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.0...v2.5.1) (2021-06-22) + + +### Bug Fixes + +* make request optional in all cases ([#179](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/179)) ([b0beaaa](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b0beaaa280e7599f75e0a439f4ecd4a9a6c059ad)) + ## [2.5.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.4.0...v2.5.0) (2021-06-07) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c578fd7a92e..12cdea86558 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.5.0", + "version": "2.5.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From a73e876781f674cefb40d05e70a623af80500ae7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Jun 2021 17:54:31 +0000 Subject: [PATCH 125/333] build(node): don't throw on deprecation in unit tests (#183) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #1134 🦕 Removes the commit body and relative PR# from the commit message. 
For example, for this commit: https://github.com/googleapis/synthtool/commit/9763f20e4b7bb1091082462b2f7970e965d0d414 `post-processor-changes.txt` would contain ``` build: enable npm for php/python builds Source-Link: https://github.com/googleapis/synthtool/commit/9763f20e4b7bb1091082462b2f7970e965d0d414 ``` instead of ``` build: enable npm for php/python builds (#1133) * build: enable npm for php/python builds * update comment Source-Link: https://github.com/googleapis/synthtool/commit/9763f20e4b7bb1091082462b2f7970e965d0d414 ``` Source-Link: https://github.com/googleapis/synthtool/commit/e934b93402284f834b510ebbf421864e881dce02 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:805e2e389eafefa5ed484c30b83a7a875e6b1c7ee125d812e8b01ecc531c3fac --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index e7c45fd36bc..7b83ac87d91 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:c9c7828c165b1985579098978877935ee52dda2b1b538087514fd24fa2443e7a + digest: sha256:805e2e389eafefa5ed484c30b83a7a875e6b1c7ee125d812e8b01ecc531c3fac From b1f418ee7f9dd1fa8821f1e07706e8ccd24a7cb3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Jun 2021 18:18:28 +0000 Subject: [PATCH 126/333] build(node): do not throw on deprecation (#1140) (#184) Refs https://github.com/googleapis/nodejs-service-usage/issues/22 Source-Link: https://github.com/googleapis/synthtool/commit/6d26b13debbfe3c6a6a9f9f1914c5bccf1e6fadc Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:e59b73e911585903ee6b8a1c5246e93d9e9463420f597b6eb2e4b616ee8a0fee --- 
handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 7b83ac87d91..26e91bb2900 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:805e2e389eafefa5ed484c30b83a7a875e6b1c7ee125d812e8b01ecc531c3fac + digest: sha256:e59b73e911585903ee6b8a1c5246e93d9e9463420f597b6eb2e4b616ee8a0fee diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 5d6383fcb78..b5646aeb628 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -32,6 +32,9 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ART } trap cleanup EXIT HUP fi +# Unit tests exercise the entire API surface, which may include +# deprecation warnings: +export MOCHA_THROW_DEPRECATION=false npm test # codecov combines coverage across integration and unit tests. Include From d0b59ab0a4a456d510157bc71be5152834cc7227 Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Tue, 29 Jun 2021 11:24:53 -0400 Subject: [PATCH 127/333] fix(deps): google-gax v2.17.0 with mTLS (#185) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 12cdea86558..969bef6e40f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,7 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^2.12.0" + "google-gax": "^2.17.0" }, "devDependencies": { "@types/mocha": "^8.0.0", From 26f9833e168dd8c5c58289b1c9bee83a0f0867a5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Jun 2021 15:41:11 +0000 Subject: [PATCH 128/333] build: auto-approve renovate-bot PRs for minor updates (#1145) (#187) Source-Link: https://github.com/googleapis/synthtool/commit/39652e3948f455fd0b77535a0145eeec561a3706 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:41d5457ff79c3945782ab7e23bf4d617fd7bf3f2b03b6d84808010f7d2e10ca2 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/auto-approve.yml | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 26e91bb2900..9d507eeeb02 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:e59b73e911585903ee6b8a1c5246e93d9e9463420f597b6eb2e4b616ee8a0fee + digest: sha256:41d5457ff79c3945782ab7e23bf4d617fd7bf3f2b03b6d84808010f7d2e10ca2 diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml index 903697974b0..a79ba66c2c0 
100644 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -4,4 +4,9 @@ rules: changedFiles: - "package\\.json$" - "CHANGELOG\\.md$" - maxFiles: 3 \ No newline at end of file + maxFiles: 3 +- author: "renovate-bot" + title: "^(fix\\(deps\\)|chore\\(deps\\)):" + changedFiles: + - "/package\\.json$" + maxFiles: 2 From f7c503c229640a2442396495f1978f1c794f3655 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Jun 2021 16:10:30 +0000 Subject: [PATCH 129/333] chore: release 2.5.2 (#186) :robot: I have created a release \*beep\* \*boop\* --- ### [2.5.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.1...v2.5.2) (2021-06-30) ### Bug Fixes * **deps:** google-gax v2.17.0 with mTLS ([#185](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/185)) ([1e9b856](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/1e9b8560cb3b60a60035c965ba1dabc24ad8f0c0)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 41d98417af1..3923e5a09c7 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.5.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.1...v2.5.2) (2021-06-30) + + +### Bug Fixes + +* **deps:** google-gax v2.17.0 with mTLS ([#185](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/185)) ([1e9b856](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/1e9b8560cb3b60a60035c965ba1dabc24ad8f0c0)) + ### [2.5.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.0...v2.5.1) (2021-06-22) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 969bef6e40f..ce6d01ecc34 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.5.1", + "version": "2.5.2", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 606ef561ab6d26e05e0bc007300bc825037c2d71 Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Mon, 12 Jul 2021 17:46:26 -0400 Subject: [PATCH 130/333] fix(deps): google-gax v2.17.1 (#188) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index ce6d01ecc34..cdfdf19eea6 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,7 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^2.17.0" + "google-gax": "^2.17.1" }, "devDependencies": { "@types/mocha": "^8.0.0", From b00406760490287e217121027ec06ecb57bff22d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 16 Jul 2021 19:10:11 +0000 Subject: [PATCH 131/333] fix: Updating WORKSPACE files to use the newest version of the Typescript generator. (#190) Also removing the explicit generator tag for the IAMPolicy mixin for the kms and pubsub APIS as the generator will now read it from the .yaml file. 
PiperOrigin-RevId: 385101839 Source-Link: https://github.com/googleapis/googleapis/commit/80f404215a9346259db760d80d0671f28c433453 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d3509d2520fb8db862129633f1cf8406d17454e1 --- .../bigquery-storage/src/v1/big_query_read_client.ts | 11 ++++++++++- .../src/v1beta1/big_query_storage_client.ts | 11 ++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 6083fd59502..92ff7ef13aa 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -41,6 +41,7 @@ const version = require('../../../package.json').version; export class BigQueryReadClient { private _terminated = false; private _opts: ClientOptions; + private _providedCustomServicePath: boolean; private _gaxModule: typeof gax | typeof gax.fallback; private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; @@ -52,6 +53,7 @@ export class BigQueryReadClient { longrunning: {}, batching: {}, }; + warn: (code: string, message: string, warnType?: string) => void; innerApiCalls: {[name: string]: Function}; pathTemplates: {[name: string]: gax.PathTemplate}; bigQueryReadStub?: Promise<{[name: string]: Function}>; @@ -95,6 +97,9 @@ export class BigQueryReadClient { const staticMembers = this.constructor as typeof BigQueryReadClient; const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); const port = opts?.port || staticMembers.port; const clientConfig = opts?.clientConfig ?? {}; const fallback = @@ -177,6 +182,9 @@ export class BigQueryReadClient { // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. 
this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; } /** @@ -205,7 +213,8 @@ export class BigQueryReadClient { ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, - this._opts + this._opts, + this._providedCustomServicePath ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 3168b16488b..0f6da277a78 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -41,6 +41,7 @@ const version = require('../../../package.json').version; export class BigQueryStorageClient { private _terminated = false; private _opts: ClientOptions; + private _providedCustomServicePath: boolean; private _gaxModule: typeof gax | typeof gax.fallback; private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; @@ -52,6 +53,7 @@ export class BigQueryStorageClient { longrunning: {}, batching: {}, }; + warn: (code: string, message: string, warnType?: string) => void; innerApiCalls: {[name: string]: Function}; pathTemplates: {[name: string]: gax.PathTemplate}; bigQueryStorageStub?: Promise<{[name: string]: Function}>; @@ -95,6 +97,9 @@ export class BigQueryStorageClient { const staticMembers = this.constructor as typeof BigQueryStorageClient; const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); const port = opts?.port || staticMembers.port; const clientConfig = opts?.clientConfig ?? 
{}; const fallback = @@ -177,6 +182,9 @@ export class BigQueryStorageClient { // of calling the API is handled in `google-gax`, with this code // merely providing the destination and request information. this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; } /** @@ -206,7 +214,8 @@ export class BigQueryStorageClient { : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1beta1 .BigQueryStorage, - this._opts + this._opts, + this._providedCustomServicePath ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides From 70c7203ff3d491c4edd84c3c1d18e19d63418d45 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 00:00:31 +0000 Subject: [PATCH 132/333] build: switch to release-please release tagging (#1129) (#192) Requires https://github.com/googleapis/releasetool/pull/338 Source-Link: https://github.com/googleapis/synthtool/commit/1563597d28eca099d6411bbc29ecd09314a80746 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:06c970a44680229c1e8cefa701dbc93b80468ec4a34e6968475084e4ec1e2d7d --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/release-please.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 9d507eeeb02..b1434427024 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:41d5457ff79c3945782ab7e23bf4d617fd7bf3f2b03b6d84808010f7d2e10ca2 + digest: sha256:06c970a44680229c1e8cefa701dbc93b80468ec4a34e6968475084e4ec1e2d7d diff --git 
a/handwritten/bigquery-storage/.github/release-please.yml b/handwritten/bigquery-storage/.github/release-please.yml index 85344b92c7f..a1b41da3cb3 100644 --- a/handwritten/bigquery-storage/.github/release-please.yml +++ b/handwritten/bigquery-storage/.github/release-please.yml @@ -1 +1,2 @@ +handleGHRelease: true releaseType: node From 487372e47294e2e35987e2fead65b02b61258a22 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 00:12:19 +0000 Subject: [PATCH 133/333] chore: release 2.5.3 (#189) :robot: I have created a release \*beep\* \*boop\* --- ### [2.5.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.2...v2.5.3) (2021-07-21) ### Bug Fixes * **deps:** google-gax v2.17.1 ([#188](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/188)) ([e49f7ee](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e49f7ee0413948779842b3b9d4faf5addc4c4db6)) * Updating WORKSPACE files to use the newest version of the Typescript generator. ([#190](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/190)) ([8649cc6](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/8649cc6ae0a4e6ae807ba9e5af438ca0ffc9592a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 3923e5a09c7..efed7f17eaf 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [2.5.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.2...v2.5.3) (2021-07-21) + + +### Bug Fixes + +* **deps:** google-gax v2.17.1 ([#188](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/188)) ([e49f7ee](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e49f7ee0413948779842b3b9d4faf5addc4c4db6)) +* Updating WORKSPACE files to use the newest version of the Typescript generator. ([#190](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/190)) ([8649cc6](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/8649cc6ae0a4e6ae807ba9e5af438ca0ffc9592a)) + ### [2.5.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.1...v2.5.2) (2021-06-30) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index cdfdf19eea6..778f9d3f85e 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.5.2", + "version": "2.5.3", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From a71d63d5d0f6defce2c1277786f086ddae16ee31 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 30 Jul 2021 17:28:24 +0000 Subject: [PATCH 134/333] build: update auto-approve config for new validation (#1169) (#193) Co-authored-by: Anthonios Partheniou Source-Link: 
https://github.com/googleapis/synthtool/commit/df7fc1e3a6df4316920ab221431945cdf9aa7217 Post-Processor: gcr.io/repo-automation-bots/owlbot-nodejs:latest@sha256:6245a5be4c0406d9b2f04f380d8b88ffe4655df3cdbb57626f8913e8d620f4dd --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/auto-approve.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index b1434427024..9b2b9550d5e 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:06c970a44680229c1e8cefa701dbc93b80468ec4a34e6968475084e4ec1e2d7d + digest: sha256:6245a5be4c0406d9b2f04f380d8b88ffe4655df3cdbb57626f8913e8d620f4dd diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml index a79ba66c2c0..49cf942280a 100644 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -6,7 +6,7 @@ rules: - "CHANGELOG\\.md$" maxFiles: 3 - author: "renovate-bot" - title: "^(fix\\(deps\\)|chore\\(deps\\)):" + title: "^(fix|chore)\\(deps\\):" changedFiles: - - "/package\\.json$" + - "package\\.json$" maxFiles: 2 From eb5a1640002ff1605d704ee6da15a6740d499e71 Mon Sep 17 00:00:00 2001 From: "F. 
Hinkelmann" Date: Wed, 4 Aug 2021 16:02:20 -0400 Subject: [PATCH 135/333] chore(nodejs): update client ref docs link in metadata (#195) --- handwritten/bigquery-storage/.repo-metadata.json | 2 +- handwritten/bigquery-storage/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index aa908c00509..4283f1fbab0 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -7,7 +7,7 @@ "language": "nodejs", "requires_billing": true, "issue_tracker": "https://b.corp.google.com/savedsearches/559654", - "client_documentation": "https://googleapis.dev/nodejs/bigquerystorage/latest", + "client_documentation": "https://cloud.google.com/nodejs/docs/reference/bigquery-storage/latest", "name": "bigquerystorage", "name_pretty": "Google BigQuery Storage", "api_id": "bigquerystorage.googleapis.com", diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index c562b19865d..7e370998a22 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -249,7 +249,7 @@ Apache Version 2.0 See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/LICENSE) -[client-docs]: https://googleapis.dev/nodejs/bigquerystorage/latest +[client-docs]: https://cloud.google.com/nodejs/docs/reference/bigquery-storage/latest [product-docs]: https://cloud.google.com/bigquery/docs/reference/storage [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png [projects]: https://console.cloud.google.com/project From 25c1f6e1db5db8b5bffd0126b9d14b8e9c2dc7cb Mon Sep 17 00:00:00 2001 From: "Benjamin E. 
Coe" Date: Mon, 16 Aug 2021 22:46:34 -0400 Subject: [PATCH 136/333] fix(deps): google-gax v2.24.1 (#198) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 778f9d3f85e..97f3769f9de 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,7 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^2.17.1" + "google-gax": "^2.24.1" }, "devDependencies": { "@types/mocha": "^8.0.0", From 397e19199e5a2a20aa9d20fa647c04da51eb4793 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 17 Aug 2021 17:18:26 +0000 Subject: [PATCH 137/333] chore: release 2.5.4 (#199) :robot: I have created a release \*beep\* \*boop\* --- ### [2.5.4](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.3...v2.5.4) (2021-08-17) ### Bug Fixes * **deps:** google-gax v2.24.1 ([#198](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/198)) ([c6f70de](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/c6f70de43641ee7a00237884cf3f40bbf1bed502)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index efed7f17eaf..10b37dcd48e 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.5.4](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.3...v2.5.4) (2021-08-17) + + +### Bug Fixes + +* **deps:** google-gax v2.24.1 ([#198](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/198)) ([c6f70de](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/c6f70de43641ee7a00237884cf3f40bbf1bed502)) + ### [2.5.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.2...v2.5.3) (2021-07-21) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 97f3769f9de..cb9d938c24d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.5.3", + "version": "2.5.4", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From cb7f8830bb48f3df0d89fac08264e03515f737e6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 23 Aug 2021 18:32:27 +0000 Subject: [PATCH 138/333] feat: turns on self-signed JWT feature flag (#200) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 392067151 Source-Link: https://github.com/googleapis/googleapis/commit/06345f7b95c4b4a3ffe4303f1f2984ccc304b2e0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/95882b37970e41e4cd51b22fa507cfd46dc7c4b6 --- .../bigquery-storage/src/v1/big_query_read_client.ts | 6 ++++++ .../src/v1beta1/big_query_storage_client.ts | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 92ff7ef13aa..5c84bf9eba5 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -124,6 +124,12 @@ export class BigQueryReadClient { // Save the auth object to the client, for use by other methods. this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + // Set the default scopes in auth client if needed. if (servicePath === staticMembers.servicePath) { this.auth.defaultScopes = staticMembers.scopes; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 0f6da277a78..0ddad79167e 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -124,6 +124,12 @@ export class BigQueryStorageClient { // Save the auth object to the client, for use by other methods. this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + // Set the default scopes in auth client if needed. 
if (servicePath === staticMembers.servicePath) { this.auth.defaultScopes = staticMembers.scopes; From 757c8343665c233f3e2b81d78fbd1fb95472356b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Aug 2021 18:56:26 +0000 Subject: [PATCH 139/333] chore: release 2.6.0 (#201) :robot: I have created a release \*beep\* \*boop\* --- ## [2.6.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.4...v2.6.0) (2021-08-23) ### Features * turns on self-signed JWT feature flag ([#200](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/200)) ([ef2206c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ef2206cf1087c23d144fbc4b50363efb4c6deab2)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 10b37dcd48e..cc2931a657f 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.6.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.4...v2.6.0) (2021-08-23) + + +### Features + +* turns on self-signed JWT feature flag ([#200](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/200)) ([ef2206c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ef2206cf1087c23d144fbc4b50363efb4c6deab2)) + ### [2.5.4](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.3...v2.5.4) (2021-08-17) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index cb9d938c24d..a20863fe302 100644 --- 
a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.5.4", + "version": "2.6.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 36e004085e2ab5570a0b63e3f498d5cea4e230f6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Aug 2021 23:38:50 +0000 Subject: [PATCH 140/333] chore: disable renovate dependency dashboard (#1194) (#202) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- .../bigquery-storage/.kokoro/continuous/node10/common.cfg | 2 +- .../bigquery-storage/.kokoro/continuous/node10/test.cfg | 2 +- .../bigquery-storage/.kokoro/presubmit/node10/common.cfg | 2 +- handwritten/bigquery-storage/.kokoro/samples-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- handwritten/bigquery-storage/README.md | 4 ++-- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 9b2b9550d5e..c45b239314f 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:6245a5be4c0406d9b2f04f380d8b88ffe4655df3cdbb57626f8913e8d620f4dd + digest: sha256:667a9e46a9aa5b80240ad164d55ac33bc9d6780b5ef42f125a41f0ad95bc1950 diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg index acc3b554c61..71061cf193b 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg @@ -7,7 +7,7 @@ 
action { } } -# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token before_action { fetch_keystore { keystore_resource { diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg index 468b8c7197a..609c0cf0a27 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg @@ -1,4 +1,4 @@ -# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token before_action { fetch_keystore { keystore_resource { diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg index acc3b554c61..71061cf193b 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg @@ -7,7 +7,7 @@ action { } } -# Bring in codecov.io master token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token +# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token before_action { fetch_keystore { keystore_resource { diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 950f8483428..f249d3e4a2e 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -39,7 +39,7 @@ if [ -f samples/package.json ]; then npm link ../ npm install cd .. 
- # If tests are running against master, configure flakybot + # If tests are running against main branch, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 319d1e0eda8..0a840452084 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -33,7 +33,7 @@ fi npm install -# If tests are running against master, configure flakybot +# If tests are running against main branch, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index b5646aeb628..af1ce7e33ca 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -21,7 +21,7 @@ export NPM_CONFIG_PREFIX=${HOME}/.npm-global cd $(dirname $0)/.. npm install -# If tests are running against master, configure flakybot +# If tests are running against main branch, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 7e370998a22..092e780f0a9 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -240,8 +240,8 @@ Contributions welcome! 
See the [Contributing Guide](https://github.com/googleapi Please note that this `README.md`, the `samples/README.md`, and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) are generated from a central template. To edit one of these files, make an edit -to its template in this -[directory](https://github.com/googleapis/synthtool/tree/master/synthtool/gcp/templates/node_library). +to its templates in +[directory](https://github.com/googleapis/synthtool). ## License From 8f0c9985d8e4868333812f4e64ddef9d2afe060f Mon Sep 17 00:00:00 2001 From: "Benjamin E. Coe" Date: Fri, 3 Sep 2021 13:46:15 -0400 Subject: [PATCH 141/333] fix(build): migrate to main branch (#204) --- .../bigquery-storage/.github/generated-files-bot.yml | 4 ++-- handwritten/bigquery-storage/README.md | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml index 7bb7ce54c58..992ccef4a13 100644 --- a/handwritten/bigquery-storage/.github/generated-files-bot.yml +++ b/handwritten/bigquery-storage/.github/generated-files-bot.yml @@ -8,9 +8,9 @@ generatedFiles: - path: '.github/generated-files-bot.+(yml|yaml)' message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' - path: 'README.md' - message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' + message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' - path: 'samples/README.md' - message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/master/.readme-partials.yaml' + message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' ignoreAuthors: - 'gcf-owl-bot[bot]' - 'yoshi-automation' diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 092e780f0a9..f7f2caa856b 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -6,7 +6,7 @@ [![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) [![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) -[![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery-storage/master.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery-storage) +[![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery-storage/main.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery-storage) @@ -15,7 +15,7 @@ Client for the BigQuery Storage API A comprehensive list of changes in each version may be found in -[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/CHANGELOG.md). +[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CHANGELOG.md). 
* [Google BigQuery Storage Node.js Client API Reference][client-docs] * [Google BigQuery Storage Documentation][product-docs] @@ -182,11 +182,11 @@ async function bigqueryStorageQuickstart() { ## Samples -Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/master/samples) directory. Each sample's `README.md` has instructions for running its sample. +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | -| BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | @@ -235,7 +235,7 @@ More Information: [Google Cloud Platform Launch Stages][launch_stages] ## Contributing -Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/CONTRIBUTING.md). +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CONTRIBUTING.md). 
Please note that this `README.md`, the `samples/README.md`, and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) @@ -247,7 +247,7 @@ to its templates in Apache Version 2.0 -See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/master/LICENSE) +See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/LICENSE) [client-docs]: https://cloud.google.com/nodejs/docs/reference/bigquery-storage/latest [product-docs]: https://cloud.google.com/bigquery/docs/reference/storage From 65519d720ec439d2a4619b20dffe7f99a7953fc4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 3 Sep 2021 10:59:50 -0700 Subject: [PATCH 142/333] chore: release 2.6.1 (#205) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index cc2931a657f..8d63a600e81 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.6.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.0...v2.6.1) (2021-09-03) + + +### Bug Fixes + +* **build:** migrate to main branch ([#204](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/204)) ([759c9f0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/759c9f0442f9cec7eec94055da87b17ba7ef18ad)) + ## [2.6.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.4...v2.6.0) (2021-08-23) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a20863fe302..311e2882432 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.6.0", + "version": "2.6.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 35834a8fcb0a9dd97724ea23f12bf796aebfb1da Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Sep 2021 21:12:28 +0000 Subject: [PATCH 143/333] chore: release 2.6.2 (#206) :robot: I have created a release \*beep\* \*boop\* --- ### [2.6.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.1...v2.6.2) (2021-09-07) ### Bug Fixes * **deps:** update dependency snappy to v7 ([#196](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/196)) ([37538ec](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/37538ec42815d0ce325416b4ee299ca3fb7b59fe)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 8d63a600e81..77c6ab23ab6 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [2.6.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.1...v2.6.2) (2021-09-07) + + +### Bug Fixes + +* **deps:** update dependency snappy to v7 ([#196](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/196)) ([37538ec](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/37538ec42815d0ce325416b4ee299ca3fb7b59fe)) + ### [2.6.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.0...v2.6.1) (2021-09-03) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 311e2882432..a5bd884b166 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.6.1", + "version": "2.6.2", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 36e772a447a89d38a94c65835c2daf4eebaad54e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 13 Sep 2021 23:18:49 +0000 Subject: [PATCH 144/333] build: enable release-trigger bot (#1212) (#207) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.github/release-trigger.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 handwritten/bigquery-storage/.github/release-trigger.yml diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml 
b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index c45b239314f..73bbf7d3210 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:667a9e46a9aa5b80240ad164d55ac33bc9d6780b5ef42f125a41f0ad95bc1950 + digest: sha256:111973c0da7608bf1e60d070e5449d48826c385a6b92a56cb9203f1725d33c3d diff --git a/handwritten/bigquery-storage/.github/release-trigger.yml b/handwritten/bigquery-storage/.github/release-trigger.yml new file mode 100644 index 00000000000..d4ca94189e1 --- /dev/null +++ b/handwritten/bigquery-storage/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 33410adf3a860337c2a4326b49986a7d5165eb91 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 15 Sep 2021 10:51:19 -0700 Subject: [PATCH 145/333] docs: Align session length with public documentation feat: Expose estimated bytes that a session will scan. 
(#208) Committer: @emkornfield PiperOrigin-RevId: 396849937 --- .../cloud/bigquery/storage/v1/storage.proto | 2 +- .../cloud/bigquery/storage/v1/stream.proto | 5 +++ .../bigquery-storage/protos/protos.d.ts | 6 ++++ handwritten/bigquery-storage/protos/protos.js | 36 +++++++++++++++++++ .../bigquery-storage/protos/protos.json | 7 ++++ .../src/v1/big_query_read_client.ts | 2 +- 6 files changed, 56 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 8ac56375044..e86ad253cfa 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -62,7 +62,7 @@ service BigQueryRead { // limits are enforced based on the number of pre-filtered rows, so some // filters can lead to lopsided assignments. // - // Read sessions automatically expire 24 hours after they are created and do + // Read sessions automatically expire 6 hours after they are created and do // not require manual clean-up by the caller. rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { option (google.api.http) = { diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 291cf4115bc..51aeaac4655 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -126,6 +126,11 @@ message ReadSession { // in that case, the user will need to use a List method to get the streams // instead, which is not yet available. repeated ReadStream streams = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
An estimate on the number of bytes this session will scan when + // all streams are completely consumed. This estimate is based on + // metadata from the table which might be incomplete or stale. + int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Information about a single stream that gets data out of the storage system. diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index fea18b2d33b..bedf6603682 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1425,6 +1425,9 @@ export namespace google { /** ReadSession streams */ streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); + + /** ReadSession estimatedTotalBytesScanned */ + estimatedTotalBytesScanned?: (number|Long|string|null); } /** Represents a ReadSession. */ @@ -1463,6 +1466,9 @@ export namespace google { /** ReadSession streams. */ public streams: google.cloud.bigquery.storage.v1.IReadStream[]; + /** ReadSession estimatedTotalBytesScanned. */ + public estimatedTotalBytesScanned: (number|Long|string); + /** ReadSession schema. 
*/ public schema?: ("avroSchema"|"arrowSchema"); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 4ead87bfb66..a1cde29224c 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -3204,6 +3204,7 @@ * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions * @property {Array.|null} [streams] ReadSession streams + * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned */ /** @@ -3294,6 +3295,14 @@ */ ReadSession.prototype.streams = $util.emptyArray; + /** + * ReadSession estimatedTotalBytesScanned. + * @member {number|Long} estimatedTotalBytesScanned + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -3351,6 +3360,8 @@ if (message.streams != null && message.streams.length) for (var i = 0; i < message.streams.length; ++i) $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) + writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); return writer; }; @@ -3414,6 +3425,9 @@ message.streams = []; message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); break; + case 12: + message.estimatedTotalBytesScanned = reader.int64(); + break; default: reader.skipType(tag & 7); break; @@ -3507,6 +3521,9 @@ return "streams." 
+ error; } } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) + return "estimatedTotalBytesScanned: integer|Long expected"; return null; }; @@ -3575,6 +3592,15 @@ message.streams[i] = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); } } + if (object.estimatedTotalBytesScanned != null) + if ($util.Long) + (message.estimatedTotalBytesScanned = $util.Long.fromValue(object.estimatedTotalBytesScanned)).unsigned = false; + else if (typeof object.estimatedTotalBytesScanned === "string") + message.estimatedTotalBytesScanned = parseInt(object.estimatedTotalBytesScanned, 10); + else if (typeof object.estimatedTotalBytesScanned === "number") + message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; + else if (typeof object.estimatedTotalBytesScanned === "object") + message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); return message; }; @@ -3600,6 +3626,11 @@ object.table = ""; object.tableModifiers = null; object.readOptions = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedTotalBytesScanned = options.longs === String ? 
"0" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -3628,6 +3659,11 @@ for (var j = 0; j < message.streams.length; ++j) object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (typeof message.estimatedTotalBytesScanned === "number") + object.estimatedTotalBytesScanned = options.longs === String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; + else + object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 6a76834bc84..b3dca4ec481 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -372,6 +372,13 @@ "options": { "(google.api.field_behavior)": "OUTPUT_ONLY" } + }, + "estimatedTotalBytesScanned": { + "type": "int64", + "id": 12, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } }, "nested": { diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 5c84bf9eba5..a722be8fec6 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -367,7 +367,7 @@ export class BigQueryReadClient { * limits are enforced based on the number of pre-filtered rows, so some * filters can lead to lopsided assignments. 
* - * Read sessions automatically expire 24 hours after they are created and do + * Read sessions automatically expire 6 hours after they are created and do * not require manual clean-up by the caller. * * @param {Object} request From f670c3505b76d7b26a76adc232f00c81727e699e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Sep 2021 08:29:44 -0700 Subject: [PATCH 146/333] feat: add BigQuery Storage Write API v1 (#209) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add BigQuery Storage Write API v1 Committer: @yirutang PiperOrigin-RevId: 397350004 Source-Link: https://github.com/googleapis/googleapis/commit/b4da4fd5eee314df550791fc287c1f1950127ee4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/67bcfcfa00a4113e968bac1a10d0ad0c1b7dc45b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjdiY2ZjZmEwMGE0MTEzZTk2OGJhYzFhMTBkMGFkMGMxYjdkYzQ1YiJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: fix system tests Co-authored-by: Owl Bot Co-authored-by: Takashi Matsuo --- .../cloud/bigquery/storage/v1/protobuf.proto | 48 + .../cloud/bigquery/storage/v1/storage.proto | 361 +- .../cloud/bigquery/storage/v1/stream.proto | 48 + .../cloud/bigquery/storage/v1/table.proto | 164 + .../bigquery-storage/protos/protos.d.ts | 9719 ++++-- handwritten/bigquery-storage/protos/protos.js | 28028 ++++++++++------ .../bigquery-storage/protos/protos.json | 1173 +- .../src/v1/big_query_read_client.ts | 126 + .../src/v1/big_query_read_client_config.json | 3 +- .../src/v1/big_query_read_proto_list.json | 4 +- .../src/v1/big_query_write_client.ts | 1183 + .../src/v1/big_query_write_client_config.json | 59 + .../src/v1/big_query_write_proto_list.json | 8 + .../src/v1/gapic_metadata.json | 69 + handwritten/bigquery-storage/src/v1/index.ts | 1 + .../system-test/fixtures/sample/src/index.js | 1 
+ .../system-test/fixtures/sample/src/index.ts | 11 +- .../test/gapic_big_query_read_v1.ts | 140 + .../test/gapic_big_query_write_v1.ts | 1115 + 19 files changed, 28047 insertions(+), 14214 deletions(-) create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client.ts create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto new file mode 100644 index 00000000000..f987467dd9f --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -0,0 +1,48 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "ProtoBufProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// ProtoSchema describes the schema of the serialized protocol buffer data rows. +message ProtoSchema { + // Descriptor for input message. The provided descriptor must be self + // contained, such that data rows sent can be fully decoded using only the + // single descriptor. For data rows that are compositions of multiple + // independent messages, this means the descriptor may need to be transformed + // to only use nested types: + // https://developers.google.com/protocol-buffers/docs/proto#nested + // + // For additional information for how proto types and values map onto BigQuery + // see: https://cloud.google.com/bigquery/docs/write-api#data_type_conversions + google.protobuf.DescriptorProto proto_descriptor = 1; +} + +message ProtoRows { + // A sequence of rows serialized as a Protocol Buffer. + // + // See https://developers.google.com/protocol-buffers/docs/overview for more + // information on deserializing this field. 
+ repeated bytes serialized_rows = 1; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index e86ad253cfa..ab5a46cf180 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -22,7 +22,12 @@ import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/bigquery/storage/v1/arrow.proto"; import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/cloud/bigquery/storage/v1/protobuf.proto"; import "google/cloud/bigquery/storage/v1/stream.proto"; +import "google/cloud/bigquery/storage/v1/table.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; +import "google/rpc/status.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; @@ -105,6 +110,121 @@ service BigQueryRead { } } +// BigQuery Write API. +// +// The Write API can be used to write data to BigQuery. +// +// For supplementary information about the Write API, see: +// https://cloud.google.com/bigquery/docs/write-api +service BigQueryWrite { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.insertdata," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a write stream to the given table. + // Additionally, every table has a special stream named '_default' + // to which data can be written. This stream doesn't need to be created using + // CreateWriteStream. It is a stream that can be used simultaneously by any + // number of clients. 
Data written to this stream is considered committed as + // soon as an acknowledgement is received. + rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/datasets/*/tables/*}" + body: "write_stream" + }; + option (google.api.method_signature) = "parent,write_stream"; + } + + // Appends data to the given stream. + // + // If `offset` is specified, the `offset` is checked against the end of + // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an + // attempt is made to append to an offset beyond the current end of the stream + // or `ALREADY_EXISTS` if user provides an `offset` that has already been + // written to. User can retry with adjusted offset within the same RPC + // connection. If `offset` is not specified, append happens at the end of the + // stream. + // + // The response contains an optional offset at which the append + // happened. No offset information will be returned for appends to a + // default stream. + // + // Responses are received in the same order in which requests are sent. + // There will be one response for each successful inserted request. Responses + // may optionally embed error information if the originating AppendRequest was + // not successfully processed. + // + // The specifics of when successfully appended data is made visible to the + // table are governed by the type of stream: + // + // * For COMMITTED streams (which includes the default stream), data is + // visible immediately upon successful append. + // + // * For BUFFERED streams, data is made visible via a subsequent `FlushRows` + // rpc which advances a cursor to a newer offset in the stream. + // + // * For PENDING streams, data is not made visible until the stream itself is + // finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly + // committed via the `BatchCommitWriteStreams` rpc. 
+ rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) { + option (google.api.http) = { + post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "write_stream"; + } + + // Gets information about a write stream. + rpc GetWriteStream(GetWriteStreamRequest) returns (WriteStream) { + option (google.api.http) = { + post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Finalize a write stream so that no new data can be appended to the + // stream. Finalize is not supported on the '_default' stream. + rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) { + option (google.api.http) = { + post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Atomically commits a group of `PENDING` streams that belong to the same + // `parent` table. + // + // Streams must be finalized before commit and cannot be committed multiple + // times. Once a stream is committed, data in the stream becomes available + // for read operations. + rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/datasets/*/tables/*}" + }; + option (google.api.method_signature) = "parent"; + } + + // Flushes rows to a BUFFERED stream. + // + // If users are appending rows to BUFFERED stream, flush operation is + // required in order for the rows to become available for reading. A + // Flush operation flushes up to any previously flushed offset in a BUFFERED + // stream, to the offset specified in the request. + // + // Flush is not supported on the _default stream, since it is not BUFFERED. 
+ rpc FlushRows(FlushRowsRequest) returns (FlushRowsResponse) { + option (google.api.http) = { + post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "write_stream"; + } +} + // Request message for `CreateReadSession`. message CreateReadSessionRequest { // Required. The request project that owns the session, in the form of @@ -153,7 +273,7 @@ message ThrottleState { int32 throttle_percent = 1; } -// Estimated stream statistics for a given Stream. +// Estimated stream statistics for a given read Stream. message StreamStats { message Progress { // The fraction of rows assigned to the stream that have been processed by @@ -246,3 +366,242 @@ message SplitReadStreamResponse { // value indicates that the original stream can no longer be split. ReadStream remainder_stream = 2; } + +// Request message for `CreateWriteStream`. +message CreateWriteStreamRequest { + // Required. Reference to the table to which the stream belongs, in the format + // of `projects/{project}/datasets/{dataset}/tables/{table}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } + ]; + + // Required. Stream to be created. + WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for `AppendRows`. +// +// Due to the nature of AppendRows being a bidirectional streaming RPC, certain +// parts of the AppendRowsRequest need only be specified for the first request +// sent each time the gRPC network connection is opened/reopened. +message AppendRowsRequest { + // ProtoData contains the data rows and schema when constructing append + // requests. + message ProtoData { + // Proto schema used to serialize the data. This value only needs to be + // provided as part of the first request on a gRPC network connection, + // and will be ignored for subsequent requests on the connection. 
+ ProtoSchema writer_schema = 1; + + // Serialized row data in protobuf message format. + // Currently, the backend expects the serialized rows to adhere to + // proto2 semantics when appending rows, particularly with respect to + // how default values are encoded. + ProtoRows rows = 2; + } + + // Required. The write_stream identifies the target of the append operation, and only + // needs to be specified as part of the first request on the gRPC connection. + // If provided for subsequent requests, it must match the value of the first + // request. + // + // For explicitly created write streams, the format is: + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` + // + // For the special default stream, the format is: + // `projects/{project}/datasets/{dataset}/tables/{table}/_default`. + string write_stream = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; + + // If present, the write is only performed if the next append offset is same + // as the provided value. If not present, the write is performed at the + // current end of stream. Specifying a value for this field is not allowed + // when calling AppendRows for the '_default' stream. + google.protobuf.Int64Value offset = 2; + + // Input rows. The `writer_schema` field must be specified at the initial + // request and currently, it will be ignored if specified in following + // requests. Following requests must have data in the same format as the + // initial request. + oneof rows { + // Rows in proto format. + ProtoData proto_rows = 4; + } + + // Id set by client to annotate its identity. Only initial request setting is + // respected. + string trace_id = 6; +} + +// Response message for `AppendRows`. +message AppendRowsResponse { + // AppendResult is returned for successful append requests. + message AppendResult { + // The row offset at which the last append occurred. 
The offset will not be + // set if appending using default streams. + google.protobuf.Int64Value offset = 1; + } + + oneof response { + // Result if the append is successful. + AppendResult append_result = 1; + + // Error returned when problems were encountered. If present, + // it indicates rows were not accepted into the system. + // Users can retry or continue with other append requests within the + // same connection. + // + // Additional information about error signalling: + // + // ALREADY_EXISTS: Happens when an append specified an offset, and the + // backend already has received data at this offset. Typically encountered + // in retry scenarios, and can be ignored. + // + // OUT_OF_RANGE: Returned when the specified offset in the stream is beyond + // the current end of the stream. + // + // INVALID_ARGUMENT: Indicates a malformed request or data. + // + // ABORTED: Request processing is aborted because of prior failures. The + // request can be retried if previous failure is addressed. + // + // INTERNAL: Indicates server side error(s) that can be retried. + google.rpc.Status error = 2; + } + + // If backend detects a schema update, pass it to user so that user can + // use it to input new type of message. It will be empty when no schema + // updates have occurred. + TableSchema updated_schema = 3; +} + +// Request message for `GetWriteStreamRequest`. +message GetWriteStreamRequest { + // Required. Name of the stream to get, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; +} + +// Request message for `BatchCommitWriteStreams`. +message BatchCommitWriteStreamsRequest { + // Required. Parent table that all the streams should belong to, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}`. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED + ]; + + // Required. The group of streams that will be committed atomically. + repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Response message for `BatchCommitWriteStreams`. +message BatchCommitWriteStreamsResponse { + // The time at which streams were committed in microseconds granularity. + // This field will only exist when there are no stream errors. + // **Note** if this field is not set, it means the commit was not successful. + google.protobuf.Timestamp commit_time = 1; + + // Stream level error if commit failed. Only streams with error will be in + // the list. + // If empty, there is no error and all streams are committed successfully. + // If non empty, certain streams have errors and ZERO stream is committed due + // to atomicity guarantee. + repeated StorageError stream_errors = 2; +} + +// Request message for invoking `FinalizeWriteStream`. +message FinalizeWriteStreamRequest { + // Required. Name of the stream to finalize, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; +} + +// Response message for `FinalizeWriteStream`. +message FinalizeWriteStreamResponse { + // Number of rows in the finalized stream. + int64 row_count = 1; +} + +// Request message for `FlushRows`. +message FlushRowsRequest { + // Required. The stream that is the target of the flush operation. + string write_stream = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; + + // Ending offset of the flush operation. Rows before this offset(including + // this offset) will be flushed. + google.protobuf.Int64Value offset = 2; +} + +// Respond message for `FlushRows`. 
+message FlushRowsResponse { + // The rows before this offset (including this offset) are flushed. + int64 offset = 1; +} + +// Structured custom BigQuery Storage error message. The error can be attached +// as error details in the returned rpc Status. In particular, the use of error +// codes allows more structured error handling, and reduces the need to evaluate +// unstructured error text strings. +message StorageError { + // Error code for `StorageError`. + enum StorageErrorCode { + // Default error. + STORAGE_ERROR_CODE_UNSPECIFIED = 0; + + // Table is not found in the system. + TABLE_NOT_FOUND = 1; + + // Stream is already committed. + STREAM_ALREADY_COMMITTED = 2; + + // Stream is not found. + STREAM_NOT_FOUND = 3; + + // Invalid Stream type. + // For example, you try to commit a stream that is not pending. + INVALID_STREAM_TYPE = 4; + + // Invalid Stream state. + // For example, you try to commit a stream that is not finalized or is + // garbaged. + INVALID_STREAM_STATE = 5; + + // Stream is finalized. + STREAM_FINALIZED = 6; + + // There is a schema mismatch and it is caused by user schema has extra + // field than bigquery schema. + SCHEMA_MISMATCH_EXTRA_FIELDS = 7; + } + + // BigQuery Storage specific error code. + StorageErrorCode code = 1; + + // Name of the failed entity. + string entity = 2; + + // Message that describes the error. 
+ string error_message = 3; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 51aeaac4655..dc62e8c00ca 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -20,6 +20,7 @@ import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/bigquery/storage/v1/arrow.proto"; import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/cloud/bigquery/storage/v1/table.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; @@ -146,3 +147,50 @@ message ReadStream { // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } + +// Information about a single stream that gets data inside the storage system. +message WriteStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/WriteStream" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" + }; + + // Type enum of the stream. + enum Type { + // Unknown type. + TYPE_UNSPECIFIED = 0; + + // Data will commit automatically and appear as soon as the write is + // acknowledged. + COMMITTED = 1; + + // Data is invisible until the stream is committed. + PENDING = 2; + + // Data is only visible up to the offset to which it was flushed. + BUFFERED = 3; + } + + // Output only. Name of the stream, in the form + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Type of the stream. + Type type = 2 [(google.api.field_behavior) = IMMUTABLE]; + + // Output only. Create time of the stream. 
For the _default stream, this is the + // creation_time of the table. + google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Commit time of the stream. + // If a stream is of `COMMITTED` type, then it will have a commit_time same as + // `create_time`. If the stream is of `PENDING` type, empty commit_time + // means it is not committed. + google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The schema of the destination table. It is only returned in + // `CreateWriteStream` response. Caller should generate data that's + // compatible with this schema to send in initial `AppendRowsRequest`. + // The table schema could go out of date during the life time of the stream. + TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto new file mode 100644 index 00000000000..a8c6f844df5 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -0,0 +1,164 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "TableProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Schema of a table. +message TableSchema { + // Describes the fields in a table. + repeated TableFieldSchema fields = 1; +} + +// TableFieldSchema defines a single field/column within a table schema. +message TableFieldSchema { + enum Type { + // Illegal value + TYPE_UNSPECIFIED = 0; + + // 64K, UTF8 + STRING = 1; + + // 64-bit signed + INT64 = 2; + + // 64-bit IEEE floating point + DOUBLE = 3; + + // Aggregate type + STRUCT = 4; + + // 64K, Binary + BYTES = 5; + + // 2-valued + BOOL = 6; + + // 64-bit signed usec since UTC epoch + TIMESTAMP = 7; + + // Civil date - Year, Month, Day + DATE = 8; + + // Civil time - Hour, Minute, Second, Microseconds + TIME = 9; + + // Combination of civil date and civil time + DATETIME = 10; + + // Geography object + GEOGRAPHY = 11; + + // Numeric value + NUMERIC = 12; + + // BigNumeric value + BIGNUMERIC = 13; + + // Interval + INTERVAL = 14; + + // JSON, String + JSON = 15; + } + + enum Mode { + // Illegal value + MODE_UNSPECIFIED = 0; + + NULLABLE = 1; + + REQUIRED = 2; + + REPEATED = 3; + } + + // Required. The field name. The name must contain only letters (a-z, A-Z), + // numbers (0-9), or underscores (_), and must start with a letter or + // underscore. The maximum length is 128 characters. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The field data type. + Type type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The field mode. The default value is NULLABLE. 
+ Mode mode = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Describes the nested schema fields if the type property is set to STRUCT. + repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The field description. The maximum length is 1,024 characters. + string description = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Maximum length of values of this field for STRINGS or BYTES. + // + // If max_length is not specified, no maximum length constraint is imposed + // on this field. + // + // If type = "STRING", then max_length represents the maximum UTF-8 + // length of strings in this field. + // + // If type = "BYTES", then max_length represents the maximum number of + // bytes in this field. + // + // It is invalid to set this field if type is not "STRING" or "BYTES". + int64 max_length = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Precision (maximum number of total digits in base 10) and scale + // (maximum number of digits in the fractional part in base 10) constraints + // for values of this field for NUMERIC or BIGNUMERIC. + // + // It is invalid to set precision or scale if type is not "NUMERIC" or + // "BIGNUMERIC". + // + // If precision and scale are not specified, no value range constraint is + // imposed on this field insofar as values are permitted by the type. + // + // Values of this NUMERIC or BIGNUMERIC field must be in this range when: + // + // * Precision (P) and scale (S) are specified: + // [-10^(P-S) + 10^(-S), 10^(P-S) - 10^(-S)] + // * Precision (P) is specified but not scale (and thus scale is + // interpreted to be equal to zero): + // [-10^P + 1, 10^P - 1]. + // + // Acceptable values for precision and scale if both are specified: + // + // * If type = "NUMERIC": + // 1 <= precision - scale <= 29 and 0 <= scale <= 9. + // * If type = "BIGNUMERIC": + // 1 <= precision - scale <= 38 and 0 <= scale <= 38. 
+ // + // Acceptable values for precision if only precision is specified but not + // scale (and thus scale is interpreted to be equal to zero): + // + // * If type = "NUMERIC": 1 <= precision <= 29. + // * If type = "BIGNUMERIC": 1 <= precision <= 38. + // + // If scale is specified but not precision, then it is invalid. + int64 precision = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. See documentation for precision. + int64 scale = 9 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index bedf6603682..7ec3286e4e5 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -501,6 +501,186 @@ export namespace google { public toJSON(): { [k: string]: any }; } + /** Properties of a ProtoSchema. */ + interface IProtoSchema { + + /** ProtoSchema protoDescriptor */ + protoDescriptor?: (google.protobuf.IDescriptorProto|null); + } + + /** Represents a ProtoSchema. */ + class ProtoSchema implements IProtoSchema { + + /** + * Constructs a new ProtoSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IProtoSchema); + + /** ProtoSchema protoDescriptor. */ + public protoDescriptor?: (google.protobuf.IDescriptorProto|null); + + /** + * Creates a new ProtoSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns ProtoSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IProtoSchema): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. 
+ * @param message ProtoSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @param message ProtoSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Verifies a ProtoSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ProtoSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. + * @param message ProtoSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ProtoSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ProtoSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ProtoRows. */ + interface IProtoRows { + + /** ProtoRows serializedRows */ + serializedRows?: (Uint8Array[]|null); + } + + /** Represents a ProtoRows. */ + class ProtoRows implements IProtoRows { + + /** + * Constructs a new ProtoRows. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IProtoRows); + + /** ProtoRows serializedRows. */ + public serializedRows: Uint8Array[]; + + /** + * Creates a new ProtoRows instance using the specified properties. + * @param [properties] Properties to set + * @returns ProtoRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IProtoRows): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. + * @param message ProtoRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. 
+ * @param message ProtoRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ProtoRows message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Decodes a ProtoRows message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Verifies a ProtoRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ProtoRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. 
+ * @param message ProtoRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ProtoRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ProtoRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + /** Represents a BigQueryRead */ class BigQueryRead extends $protobuf.rpc.Service { @@ -588,6 +768,156 @@ export namespace google { type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.SplitReadStreamResponse) => void; } + /** Represents a BigQueryWrite */ + class BigQueryWrite extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryWrite service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryWrite service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryWrite; + + /** + * Calls CreateWriteStream. + * @param request CreateWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and WriteStream + */ + public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback): void; + + /** + * Calls CreateWriteStream. 
+ * @param request CreateWriteStreamRequest message or plain object + * @returns Promise + */ + public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): Promise; + + /** + * Calls AppendRows. + * @param request AppendRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and AppendRowsResponse + */ + public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback): void; + + /** + * Calls AppendRows. + * @param request AppendRowsRequest message or plain object + * @returns Promise + */ + public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest): Promise; + + /** + * Calls GetWriteStream. + * @param request GetWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and WriteStream + */ + public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback): void; + + /** + * Calls GetWriteStream. + * @param request GetWriteStreamRequest message or plain object + * @returns Promise + */ + public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): Promise; + + /** + * Calls FinalizeWriteStream. + * @param request FinalizeWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse + */ + public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback): void; + + /** + * Calls FinalizeWriteStream. 
+ * @param request FinalizeWriteStreamRequest message or plain object + * @returns Promise + */ + public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): Promise; + + /** + * Calls BatchCommitWriteStreams. + * @param request BatchCommitWriteStreamsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse + */ + public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback): void; + + /** + * Calls BatchCommitWriteStreams. + * @param request BatchCommitWriteStreamsRequest message or plain object + * @returns Promise + */ + public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): Promise; + + /** + * Calls FlushRows. + * @param request FlushRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and FlushRowsResponse + */ + public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback): void; + + /** + * Calls FlushRows. + * @param request FlushRowsRequest message or plain object + * @returns Promise + */ + public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest): Promise; + } + + namespace BigQueryWrite { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#createWriteStream}. + * @param error Error, if any + * @param [response] WriteStream + */ + type CreateWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#appendRows}. 
+ * @param error Error, if any + * @param [response] AppendRowsResponse + */ + type AppendRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.AppendRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#getWriteStream}. + * @param error Error, if any + * @param [response] WriteStream + */ + type GetWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#finalizeWriteStream}. + * @param error Error, if any + * @param [response] FinalizeWriteStreamResponse + */ + type FinalizeWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#batchCommitWriteStreams}. + * @param error Error, if any + * @param [response] BatchCommitWriteStreamsResponse + */ + type BatchCommitWriteStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#flushRows}. + * @param error Error, if any + * @param [response] FlushRowsResponse + */ + type FlushRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FlushRowsResponse) => void; + } + /** Properties of a CreateReadSessionRequest. */ interface ICreateReadSessionRequest { @@ -1389,1233 +1719,1280 @@ export namespace google { public toJSON(): { [k: string]: any }; } - /** DataFormat enum. */ - enum DataFormat { - DATA_FORMAT_UNSPECIFIED = 0, - AVRO = 1, - ARROW = 2 - } - - /** Properties of a ReadSession. 
*/ - interface IReadSession { - - /** ReadSession name */ - name?: (string|null); - - /** ReadSession expireTime */ - expireTime?: (google.protobuf.ITimestamp|null); - - /** ReadSession dataFormat */ - dataFormat?: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat|null); - - /** ReadSession avroSchema */ - avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); - - /** ReadSession arrowSchema */ - arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - - /** ReadSession table */ - table?: (string|null); - - /** ReadSession tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); - - /** ReadSession readOptions */ - readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + /** Properties of a CreateWriteStreamRequest. */ + interface ICreateWriteStreamRequest { - /** ReadSession streams */ - streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); + /** CreateWriteStreamRequest parent */ + parent?: (string|null); - /** ReadSession estimatedTotalBytesScanned */ - estimatedTotalBytesScanned?: (number|Long|string|null); + /** CreateWriteStreamRequest writeStream */ + writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); } - /** Represents a ReadSession. */ - class ReadSession implements IReadSession { + /** Represents a CreateWriteStreamRequest. */ + class CreateWriteStreamRequest implements ICreateWriteStreamRequest { /** - * Constructs a new ReadSession. + * Constructs a new CreateWriteStreamRequest. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadSession); + constructor(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest); - /** ReadSession name. */ - public name: string; + /** CreateWriteStreamRequest parent. */ + public parent: string; - /** ReadSession expireTime. 
*/ - public expireTime?: (google.protobuf.ITimestamp|null); + /** CreateWriteStreamRequest writeStream. */ + public writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); - /** ReadSession dataFormat. */ - public dataFormat: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat); + /** + * Creates a new CreateWriteStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateWriteStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - /** ReadSession avroSchema. */ - public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + /** + * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @param message CreateWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - /** ReadSession arrowSchema. */ - public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + /** + * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @param message CreateWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - /** ReadSession table. */ - public table: string; + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - /** ReadSession tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - /** ReadSession readOptions. */ - public readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + /** + * Verifies a CreateWriteStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); - /** ReadSession streams. */ - public streams: google.cloud.bigquery.storage.v1.IReadStream[]; + /** + * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateWriteStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - /** ReadSession estimatedTotalBytesScanned. */ - public estimatedTotalBytesScanned: (number|Long|string); + /** + * Creates a plain object from a CreateWriteStreamRequest message. 
Also converts values to other types if specified. + * @param message CreateWriteStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** ReadSession schema. */ - public schema?: ("avroSchema"|"arrowSchema"); + /** + * Converts this CreateWriteStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an AppendRowsRequest. */ + interface IAppendRowsRequest { + + /** AppendRowsRequest writeStream */ + writeStream?: (string|null); + + /** AppendRowsRequest offset */ + offset?: (google.protobuf.IInt64Value|null); + + /** AppendRowsRequest protoRows */ + protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + + /** AppendRowsRequest traceId */ + traceId?: (string|null); + } + + /** Represents an AppendRowsRequest. */ + class AppendRowsRequest implements IAppendRowsRequest { /** - * Creates a new ReadSession instance using the specified properties. + * Constructs a new AppendRowsRequest. * @param [properties] Properties to set - * @returns ReadSession instance */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadSession): google.cloud.bigquery.storage.v1.ReadSession; + constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest); + + /** AppendRowsRequest writeStream. */ + public writeStream: string; + + /** AppendRowsRequest offset. */ + public offset?: (google.protobuf.IInt64Value|null); + + /** AppendRowsRequest protoRows. */ + public protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + + /** AppendRowsRequest traceId. */ + public traceId: string; + + /** AppendRowsRequest rows. */ + public rows?: "protoRows"; /** - * Encodes the specified ReadSession message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. - * @param message ReadSession message or plain object to encode + * Creates a new AppendRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns AppendRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest): google.cloud.bigquery.storage.v1.AppendRowsRequest; + + /** + * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @param message AppendRowsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. - * @param message ReadSession message or plain object to encode + * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @param message AppendRowsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ReadSession message from the specified reader or buffer. + * Decodes an AppendRowsRequest message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ReadSession + * @returns AppendRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest; /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ReadSession + * @returns AppendRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest; /** - * Verifies a ReadSession message. + * Verifies an AppendRowsRequest message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. + * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns ReadSession + * @returns AppendRowsRequest */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest; /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @param message ReadSession + * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. + * @param message AppendRowsRequest * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ReadSession to JSON. + * Converts this AppendRowsRequest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace ReadSession { + namespace AppendRowsRequest { - /** Properties of a TableModifiers. */ - interface ITableModifiers { + /** Properties of a ProtoData. */ + interface IProtoData { - /** TableModifiers snapshotTime */ - snapshotTime?: (google.protobuf.ITimestamp|null); + /** ProtoData writerSchema */ + writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); + + /** ProtoData rows */ + rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); } - /** Represents a TableModifiers. */ - class TableModifiers implements ITableModifiers { + /** Represents a ProtoData. */ + class ProtoData implements IProtoData { /** - * Constructs a new TableModifiers. + * Constructs a new ProtoData. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers); + constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData); - /** TableModifiers snapshotTime. */ - public snapshotTime?: (google.protobuf.ITimestamp|null); + /** ProtoData writerSchema. */ + public writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); + + /** ProtoData rows. */ + public rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); /** - * Creates a new TableModifiers instance using the specified properties. + * Creates a new ProtoData instance using the specified properties. * @param [properties] Properties to set - * @returns TableModifiers instance + * @returns ProtoData instance */ - public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode + * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. + * @param message ProtoData message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified TableModifiers message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode + * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. + * @param message ProtoData message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a TableModifiers message from the specified reader or buffer. + * Decodes a ProtoData message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns TableModifiers + * @returns ProtoData * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * Decodes a ProtoData message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns TableModifiers + * @returns ProtoData * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; /** - * Verifies a TableModifiers message. + * Verifies a ProtoData message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns TableModifiers + * @returns ProtoData */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. - * @param message TableModifiers + * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
+ * @param message ProtoData * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this TableModifiers to JSON. + * Converts this ProtoData to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } + } - /** Properties of a TableReadOptions. */ - interface ITableReadOptions { + /** Properties of an AppendRowsResponse. */ + interface IAppendRowsResponse { - /** TableReadOptions selectedFields */ - selectedFields?: (string[]|null); + /** AppendRowsResponse appendResult */ + appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); - /** TableReadOptions rowRestriction */ - rowRestriction?: (string|null); + /** AppendRowsResponse error */ + error?: (google.rpc.IStatus|null); - /** TableReadOptions arrowSerializationOptions */ - arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + /** AppendRowsResponse updatedSchema */ + updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + } + + /** Represents an AppendRowsResponse. */ + class AppendRowsResponse implements IAppendRowsResponse { + + /** + * Constructs a new AppendRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse); + + /** AppendRowsResponse appendResult. */ + public appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); + + /** AppendRowsResponse error. */ + public error?: (google.rpc.IStatus|null); + + /** AppendRowsResponse updatedSchema. 
*/ + public updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** AppendRowsResponse response. */ + public response?: ("appendResult"|"error"); + + /** + * Creates a new AppendRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns AppendRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. + * @param message AppendRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. + * @param message AppendRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Verifies an AppendRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AppendRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified. + * @param message AppendRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AppendRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace AppendRowsResponse { + + /** Properties of an AppendResult. */ + interface IAppendResult { + + /** AppendResult offset */ + offset?: (google.protobuf.IInt64Value|null); } - /** Represents a TableReadOptions. */ - class TableReadOptions implements ITableReadOptions { + /** Represents an AppendResult. */ + class AppendResult implements IAppendResult { /** - * Constructs a new TableReadOptions. + * Constructs a new AppendResult. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions); - - /** TableReadOptions selectedFields. */ - public selectedFields: string[]; - - /** TableReadOptions rowRestriction. */ - public rowRestriction: string; - - /** TableReadOptions arrowSerializationOptions. */ - public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult); - /** TableReadOptions outputFormatSerializationOptions. */ - public outputFormatSerializationOptions?: "arrowSerializationOptions"; + /** AppendResult offset. */ + public offset?: (google.protobuf.IInt64Value|null); /** - * Creates a new TableReadOptions instance using the specified properties. + * Creates a new AppendResult instance using the specified properties. * @param [properties] Properties to set - * @returns TableReadOptions instance + * @returns AppendResult instance */ - public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode + * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. 
+ * @param message AppendResult message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode + * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. + * @param message AppendResult message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a TableReadOptions message from the specified reader or buffer. + * Decodes an AppendResult message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns TableReadOptions + * @returns AppendResult * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * Decodes an AppendResult message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns TableReadOptions + * @returns AppendResult * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; /** - * Verifies a TableReadOptions message. + * Verifies an AppendResult message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns TableReadOptions + * @returns AppendResult */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. - * @param message TableReadOptions + * Creates a plain object from an AppendResult message. Also converts values to other types if specified. + * @param message AppendResult * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this TableReadOptions to JSON. + * Converts this AppendResult to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } } - /** Properties of a ReadStream. */ - interface IReadStream { + /** Properties of a GetWriteStreamRequest. */ + interface IGetWriteStreamRequest { - /** ReadStream name */ + /** GetWriteStreamRequest name */ name?: (string|null); } - /** Represents a ReadStream. */ - class ReadStream implements IReadStream { + /** Represents a GetWriteStreamRequest. */ + class GetWriteStreamRequest implements IGetWriteStreamRequest { /** - * Constructs a new ReadStream. + * Constructs a new GetWriteStreamRequest. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadStream); + constructor(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest); - /** ReadStream name. */ + /** GetWriteStreamRequest name. 
*/ public name: string; /** - * Creates a new ReadStream instance using the specified properties. + * Creates a new GetWriteStreamRequest instance using the specified properties. * @param [properties] Properties to set - * @returns ReadStream instance + * @returns GetWriteStreamRequest instance */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadStream): google.cloud.bigquery.storage.v1.ReadStream; + public static create(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; /** - * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @param message ReadStream message or plain object to encode + * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. + * @param message GetWriteStreamRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @param message ReadStream message or plain object to encode + * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. 
+ * @param message GetWriteStreamRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ReadStream message from the specified reader or buffer. + * Decodes a GetWriteStreamRequest message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ReadStream + * @returns GetWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadStream; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; /** - * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ReadStream + * @returns GetWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadStream; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; /** - * Verifies a ReadStream message. + * Verifies a GetWriteStreamRequest message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ReadStream + * @returns GetWriteStreamRequest */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadStream; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; /** - * Creates a plain object from a ReadStream message. Also converts values to other types if specified. - * @param message ReadStream + * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. + * @param message GetWriteStreamRequest * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.GetWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ReadStream to JSON. + * Converts this GetWriteStreamRequest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - } - /** Namespace v1beta1. */ - namespace v1beta1 { + /** Properties of a BatchCommitWriteStreamsRequest. */ + interface IBatchCommitWriteStreamsRequest { - /** Properties of an ArrowSchema. 
*/ - interface IArrowSchema { + /** BatchCommitWriteStreamsRequest parent */ + parent?: (string|null); - /** ArrowSchema serializedSchema */ - serializedSchema?: (Uint8Array|string|null); + /** BatchCommitWriteStreamsRequest writeStreams */ + writeStreams?: (string[]|null); } - /** Represents an ArrowSchema. */ - class ArrowSchema implements IArrowSchema { + /** Represents a BatchCommitWriteStreamsRequest. */ + class BatchCommitWriteStreamsRequest implements IBatchCommitWriteStreamsRequest { /** - * Constructs a new ArrowSchema. + * Constructs a new BatchCommitWriteStreamsRequest. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); + constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest); - /** ArrowSchema serializedSchema. */ - public serializedSchema: (Uint8Array|string); + /** BatchCommitWriteStreamsRequest parent. */ + public parent: string; + + /** BatchCommitWriteStreamsRequest writeStreams. */ + public writeStreams: string[]; /** - * Creates a new ArrowSchema instance using the specified properties. + * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. * @param [properties] Properties to set - * @returns ArrowSchema instance + * @returns BatchCommitWriteStreamsRequest instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; /** - * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @param message ArrowSchema message or plain object to encode + * Encodes the specified BatchCommitWriteStreamsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. + * @param message BatchCommitWriteStreamsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @param message ArrowSchema message or plain object to encode + * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. + * @param message BatchCommitWriteStreamsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an ArrowSchema message from the specified reader or buffer. + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ArrowSchema + * @returns BatchCommitWriteStreamsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ArrowSchema + * @returns BatchCommitWriteStreamsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; /** - * Verifies an ArrowSchema message. + * Verifies a BatchCommitWriteStreamsRequest message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns ArrowSchema + * @returns BatchCommitWriteStreamsRequest */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. - * @param message ArrowSchema + * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. + * @param message BatchCommitWriteStreamsRequest * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ArrowSchema to JSON. + * Converts this BatchCommitWriteStreamsRequest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of an ArrowRecordBatch. */ - interface IArrowRecordBatch { + /** Properties of a BatchCommitWriteStreamsResponse. */ + interface IBatchCommitWriteStreamsResponse { - /** ArrowRecordBatch serializedRecordBatch */ - serializedRecordBatch?: (Uint8Array|string|null); + /** BatchCommitWriteStreamsResponse commitTime */ + commitTime?: (google.protobuf.ITimestamp|null); - /** ArrowRecordBatch rowCount */ - rowCount?: (number|Long|string|null); + /** BatchCommitWriteStreamsResponse streamErrors */ + streamErrors?: (google.cloud.bigquery.storage.v1.IStorageError[]|null); } - /** Represents an ArrowRecordBatch. */ - class ArrowRecordBatch implements IArrowRecordBatch { + /** Represents a BatchCommitWriteStreamsResponse. 
*/ + class BatchCommitWriteStreamsResponse implements IBatchCommitWriteStreamsResponse { /** - * Constructs a new ArrowRecordBatch. + * Constructs a new BatchCommitWriteStreamsResponse. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); + constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse); - /** ArrowRecordBatch serializedRecordBatch. */ - public serializedRecordBatch: (Uint8Array|string); + /** BatchCommitWriteStreamsResponse commitTime. */ + public commitTime?: (google.protobuf.ITimestamp|null); - /** ArrowRecordBatch rowCount. */ - public rowCount: (number|Long|string); + /** BatchCommitWriteStreamsResponse streamErrors. */ + public streamErrors: google.cloud.bigquery.storage.v1.IStorageError[]; /** - * Creates a new ArrowRecordBatch instance using the specified properties. + * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. * @param [properties] Properties to set - * @returns ArrowRecordBatch instance + * @returns BatchCommitWriteStreamsResponse instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; /** - * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode + * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. 
+ * @param message BatchCommitWriteStreamsResponse message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode + * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. + * @param message BatchCommitWriteStreamsResponse message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ArrowRecordBatch + * @returns BatchCommitWriteStreamsResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ArrowRecordBatch + * @returns BatchCommitWriteStreamsResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; /** - * Verifies an ArrowRecordBatch message. + * Verifies a BatchCommitWriteStreamsResponse message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns ArrowRecordBatch + * @returns BatchCommitWriteStreamsResponse */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. - * @param message ArrowRecordBatch + * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. + * @param message BatchCommitWriteStreamsResponse * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ArrowRecordBatch to JSON. + * Converts this BatchCommitWriteStreamsResponse to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of an AvroSchema. */ - interface IAvroSchema { + /** Properties of a FinalizeWriteStreamRequest. */ + interface IFinalizeWriteStreamRequest { - /** AvroSchema schema */ - schema?: (string|null); + /** FinalizeWriteStreamRequest name */ + name?: (string|null); } - /** Represents an AvroSchema. */ - class AvroSchema implements IAvroSchema { + /** Represents a FinalizeWriteStreamRequest. */ + class FinalizeWriteStreamRequest implements IFinalizeWriteStreamRequest { /** - * Constructs a new AvroSchema. + * Constructs a new FinalizeWriteStreamRequest. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema); + constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest); - /** AvroSchema schema. */ - public schema: string; + /** FinalizeWriteStreamRequest name. */ + public name: string; /** - * Creates a new AvroSchema instance using the specified properties. + * Creates a new FinalizeWriteStreamRequest instance using the specified properties. * @param [properties] Properties to set - * @returns AvroSchema instance + * @returns FinalizeWriteStreamRequest instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema): google.cloud.bigquery.storage.v1beta1.AvroSchema; + public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. - * @param message AvroSchema message or plain object to encode + * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. + * @param message FinalizeWriteStreamRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. 
- * @param message AvroSchema message or plain object to encode + * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. + * @param message FinalizeWriteStreamRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an AvroSchema message from the specified reader or buffer. + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns AvroSchema + * @returns FinalizeWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroSchema; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns AvroSchema + * @returns FinalizeWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroSchema; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; /** - * Verifies an AvroSchema message. + * Verifies a FinalizeWriteStreamRequest message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns AvroSchema + * @returns FinalizeWriteStreamRequest */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroSchema; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. - * @param message AvroSchema + * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. 
+ * @param message FinalizeWriteStreamRequest * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this AvroSchema to JSON. + * Converts this FinalizeWriteStreamRequest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of an AvroRows. */ - interface IAvroRows { - - /** AvroRows serializedBinaryRows */ - serializedBinaryRows?: (Uint8Array|string|null); + /** Properties of a FinalizeWriteStreamResponse. */ + interface IFinalizeWriteStreamResponse { - /** AvroRows rowCount */ + /** FinalizeWriteStreamResponse rowCount */ rowCount?: (number|Long|string|null); } - /** Represents an AvroRows. */ - class AvroRows implements IAvroRows { + /** Represents a FinalizeWriteStreamResponse. */ + class FinalizeWriteStreamResponse implements IFinalizeWriteStreamResponse { /** - * Constructs a new AvroRows. + * Constructs a new FinalizeWriteStreamResponse. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); - - /** AvroRows serializedBinaryRows. */ - public serializedBinaryRows: (Uint8Array|string); + constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse); - /** AvroRows rowCount. */ + /** FinalizeWriteStreamResponse rowCount. */ public rowCount: (number|Long|string); /** - * Creates a new AvroRows instance using the specified properties. + * Creates a new FinalizeWriteStreamResponse instance using the specified properties. 
* @param [properties] Properties to set - * @returns AvroRows instance + * @returns FinalizeWriteStreamResponse instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows): google.cloud.bigquery.storage.v1beta1.AvroRows; + public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode + * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. + * @param message FinalizeWriteStreamResponse message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode + * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
+ * @param message FinalizeWriteStreamResponse message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an AvroRows message from the specified reader or buffer. + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns AvroRows + * @returns FinalizeWriteStreamResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroRows; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns AvroRows + * @returns FinalizeWriteStreamResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroRows; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; /** - * Verifies an AvroRows message. + * Verifies a FinalizeWriteStreamResponse message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns AvroRows + * @returns FinalizeWriteStreamResponse */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroRows; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. - * @param message AvroRows + * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified. + * @param message FinalizeWriteStreamResponse * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this AvroRows to JSON. + * Converts this FinalizeWriteStreamResponse to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a TableReadOptions. */ - interface ITableReadOptions { + /** Properties of a FlushRowsRequest. 
*/ + interface IFlushRowsRequest { - /** TableReadOptions selectedFields */ - selectedFields?: (string[]|null); + /** FlushRowsRequest writeStream */ + writeStream?: (string|null); - /** TableReadOptions rowRestriction */ - rowRestriction?: (string|null); + /** FlushRowsRequest offset */ + offset?: (google.protobuf.IInt64Value|null); } - /** Represents a TableReadOptions. */ - class TableReadOptions implements ITableReadOptions { + /** Represents a FlushRowsRequest. */ + class FlushRowsRequest implements IFlushRowsRequest { /** - * Constructs a new TableReadOptions. + * Constructs a new FlushRowsRequest. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions); + constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest); - /** TableReadOptions selectedFields. */ - public selectedFields: string[]; + /** FlushRowsRequest writeStream. */ + public writeStream: string; - /** TableReadOptions rowRestriction. */ - public rowRestriction: string; + /** FlushRowsRequest offset. */ + public offset?: (google.protobuf.IInt64Value|null); /** - * Creates a new TableReadOptions instance using the specified properties. + * Creates a new FlushRowsRequest instance using the specified properties. * @param [properties] Properties to set - * @returns TableReadOptions instance + * @returns FlushRowsRequest instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest): google.cloud.bigquery.storage.v1.FlushRowsRequest; /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode + * Encodes the specified FlushRowsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. + * @param message FlushRowsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode + * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. + * @param message FlushRowsRequest message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a TableReadOptions message from the specified reader or buffer. + * Decodes a FlushRowsRequest message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns TableReadOptions + * @returns FlushRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsRequest; /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns TableReadOptions + * @returns FlushRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsRequest; /** - * Verifies a TableReadOptions message. + * Verifies a FlushRowsRequest message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns TableReadOptions + * @returns FlushRowsRequest */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsRequest; /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. - * @param message TableReadOptions + * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. + * @param message FlushRowsRequest * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this TableReadOptions to JSON. + * Converts this FlushRowsRequest to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Represents a BigQueryStorage */ - class BigQueryStorage extends $protobuf.rpc.Service { + /** Properties of a FlushRowsResponse. */ + interface IFlushRowsResponse { + + /** FlushRowsResponse offset */ + offset?: (number|Long|string|null); + } + + /** Represents a FlushRowsResponse. */ + class FlushRowsResponse implements IFlushRowsResponse { /** - * Constructs a new BigQueryStorage service. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited + * Constructs a new FlushRowsResponse. 
+ * @param [properties] Properties to set */ - constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse); + + /** FlushRowsResponse offset. */ + public offset: (number|Long|string); /** - * Creates new BigQueryStorage service using the specified rpc implementation. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - * @returns RPC service. Useful where requests and/or responses are streamed. + * Creates a new FlushRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns FlushRowsResponse instance */ - public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryStorage; + public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse): google.cloud.bigquery.storage.v1.FlushRowsResponse; /** - * Calls CreateReadSession. - * @param request CreateReadSessionRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadSession + * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. + * @param message FlushRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer */ - public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback): void; + public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Calls CreateReadSession. 
- * @param request CreateReadSessionRequest message or plain object - * @returns Promise + * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. + * @param message FlushRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer */ - public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): Promise; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Calls ReadRows. - * @param request ReadRowsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + * Decodes a FlushRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback): void; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsResponse; /** - * Calls ReadRows. - * @param request ReadRowsRequest message or plain object - * @returns Promise + * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): Promise; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsResponse; /** - * Calls BatchCreateReadSessionStreams. - * @param request BatchCreateReadSessionStreamsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + * Verifies a FlushRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not */ - public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback): void; + public static verify(message: { [k: string]: any }): (string|null); /** - * Calls BatchCreateReadSessionStreams. - * @param request BatchCreateReadSessionStreamsRequest message or plain object - * @returns Promise + * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FlushRowsResponse */ - public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): Promise; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsResponse; /** - * Calls FinalizeStream. - * @param request FinalizeStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and Empty + * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. 
+ * @param message FlushRowsResponse + * @param [options] Conversion options + * @returns Plain object */ - public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback): void; + public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Calls FinalizeStream. - * @param request FinalizeStreamRequest message or plain object - * @returns Promise - */ - public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): Promise; - - /** - * Calls SplitReadStream. - * @param request SplitReadStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse - */ - public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback): void; - - /** - * Calls SplitReadStream. - * @param request SplitReadStreamRequest message or plain object - * @returns Promise + * Converts this FlushRowsResponse to JSON. + * @returns JSON object */ - public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): Promise; + public toJSON(): { [k: string]: any }; } - namespace BigQueryStorage { - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. - * @param error Error, if any - * @param [response] ReadSession - */ - type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. 
- * @param error Error, if any - * @param [response] ReadRowsResponse - */ - type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. - * @param error Error, if any - * @param [response] BatchCreateReadSessionStreamsResponse - */ - type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. - * @param error Error, if any - * @param [response] Empty - */ - type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + /** Properties of a StorageError. */ + interface IStorageError { - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. - * @param error Error, if any - * @param [response] SplitReadStreamResponse - */ - type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) => void; - } + /** StorageError code */ + code?: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null); - /** Properties of a Stream. */ - interface IStream { + /** StorageError entity */ + entity?: (string|null); - /** Stream name */ - name?: (string|null); + /** StorageError errorMessage */ + errorMessage?: (string|null); } - /** Represents a Stream. */ - class Stream implements IStream { + /** Represents a StorageError. */ + class StorageError implements IStorageError { /** - * Constructs a new Stream. + * Constructs a new StorageError. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStream); + constructor(properties?: google.cloud.bigquery.storage.v1.IStorageError); - /** Stream name. */ - public name: string; + /** StorageError code. */ + public code: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode); + + /** StorageError entity. */ + public entity: string; + + /** StorageError errorMessage. */ + public errorMessage: string; /** - * Creates a new Stream instance using the specified properties. + * Creates a new StorageError instance using the specified properties. * @param [properties] Properties to set - * @returns Stream instance + * @returns StorageError instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStream): google.cloud.bigquery.storage.v1beta1.Stream; + public static create(properties?: google.cloud.bigquery.storage.v1.IStorageError): google.cloud.bigquery.storage.v1.StorageError; /** - * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. - * @param message Stream message or plain object to encode + * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. + * @param message StorageError message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
- * @param message Stream message or plain object to encode + * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. + * @param message StorageError message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a Stream message from the specified reader or buffer. + * Decodes a StorageError message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns Stream + * @returns StorageError * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Stream; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StorageError; /** - * Decodes a Stream message from the specified reader or buffer, length delimited. + * Decodes a StorageError message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns Stream + * @returns StorageError * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Stream; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StorageError; /** - * Verifies a Stream message. 
+ * Verifies a StorageError message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * Creates a StorageError message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns Stream + * @returns StorageError */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Stream; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StorageError; /** - * Creates a plain object from a Stream message. Also converts values to other types if specified. - * @param message Stream + * Creates a plain object from a StorageError message. Also converts values to other types if specified. + * @param message StorageError * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.Stream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.StorageError, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this Stream to JSON. + * Converts this StorageError to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a StreamPosition. */ - interface IStreamPosition { - - /** StreamPosition stream */ - stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** StreamPosition offset */ - offset?: (number|Long|string|null); + namespace StorageError { + + /** StorageErrorCode enum. 
*/ + enum StorageErrorCode { + STORAGE_ERROR_CODE_UNSPECIFIED = 0, + TABLE_NOT_FOUND = 1, + STREAM_ALREADY_COMMITTED = 2, + STREAM_NOT_FOUND = 3, + INVALID_STREAM_TYPE = 4, + INVALID_STREAM_STATE = 5, + STREAM_FINALIZED = 6, + SCHEMA_MISMATCH_EXTRA_FIELDS = 7 + } } - /** Represents a StreamPosition. */ - class StreamPosition implements IStreamPosition { - - /** - * Constructs a new StreamPosition. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition); - - /** StreamPosition stream. */ - public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** StreamPosition offset. */ - public offset: (number|Long|string); - - /** - * Creates a new StreamPosition instance using the specified properties. - * @param [properties] Properties to set - * @returns StreamPosition instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. - * @param message StreamPosition message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. - * @param message StreamPosition message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StreamPosition message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Decodes a StreamPosition message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Verifies a StreamPosition message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns StreamPosition - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. - * @param message StreamPosition - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamPosition, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StreamPosition to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; + /** DataFormat enum. 
*/ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 2 } /** Properties of a ReadSession. */ @@ -2627,23 +3004,29 @@ export namespace google { /** ReadSession expireTime */ expireTime?: (google.protobuf.ITimestamp|null); + /** ReadSession dataFormat */ + dataFormat?: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat|null); + /** ReadSession avroSchema */ - avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); /** ReadSession arrowSchema */ - arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); - - /** ReadSession streams */ - streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - /** ReadSession tableReference */ - tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + /** ReadSession table */ + table?: (string|null); /** ReadSession tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); - /** ReadSession shardingStrategy */ - shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + /** ReadSession readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); + + /** ReadSession estimatedTotalBytesScanned */ + estimatedTotalBytesScanned?: (number|Long|string|null); } /** Represents a ReadSession. */ @@ -2653,7 +3036,7 @@ export namespace google { * Constructs a new ReadSession. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession); + constructor(properties?: google.cloud.bigquery.storage.v1.IReadSession); /** ReadSession name. */ public name: string; @@ -2661,23 +3044,29 @@ export namespace google { /** ReadSession expireTime. */ public expireTime?: (google.protobuf.ITimestamp|null); + /** ReadSession dataFormat. */ + public dataFormat: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat); + /** ReadSession avroSchema. */ - public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); /** ReadSession arrowSchema. */ - public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); - - /** ReadSession streams. */ - public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - /** ReadSession tableReference. */ - public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + /** ReadSession table. */ + public table: string; /** ReadSession tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + public tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); - /** ReadSession shardingStrategy. */ - public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + /** ReadSession readOptions. */ + public readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams. */ + public streams: google.cloud.bigquery.storage.v1.IReadStream[]; + + /** ReadSession estimatedTotalBytesScanned. */ + public estimatedTotalBytesScanned: (number|Long|string); /** ReadSession schema. 
*/ public schema?: ("avroSchema"|"arrowSchema"); @@ -2687,23 +3076,23 @@ export namespace google { * @param [properties] Properties to set * @returns ReadSession instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession): google.cloud.bigquery.storage.v1beta1.ReadSession; + public static create(properties?: google.cloud.bigquery.storage.v1.IReadSession): google.cloud.bigquery.storage.v1.ReadSession; /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. * @param message ReadSession message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. * @param message ReadSession message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; /** * Decodes a ReadSession message from the specified reader or buffer. 
@@ -2713,7 +3102,7 @@ export namespace google { * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadSession; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession; /** * Decodes a ReadSession message from the specified reader or buffer, length delimited. @@ -2722,7 +3111,7 @@ export namespace google { * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadSession; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession; /** * Verifies a ReadSession message. @@ -2736,7 +3125,7 @@ export namespace google { * @param object Plain object * @returns ReadSession */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadSession; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession; /** * Creates a plain object from a ReadSession message. Also converts values to other types if specified. @@ -2744,7 +3133,7 @@ export namespace google { * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** * Converts this ReadSession to JSON. 
@@ -2753,5207 +3142,7807 @@ export namespace google { public toJSON(): { [k: string]: any }; } - /** Properties of a CreateReadSessionRequest. */ - interface ICreateReadSessionRequest { + namespace ReadSession { - /** CreateReadSessionRequest tableReference */ - tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + /** Properties of a TableModifiers. */ + interface ITableModifiers { - /** CreateReadSessionRequest parent */ - parent?: (string|null); + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } - /** CreateReadSessionRequest tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { - /** CreateReadSessionRequest requestedStreams */ - requestedStreams?: (number|null); + /** + * Constructs a new TableModifiers. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers); - /** CreateReadSessionRequest readOptions */ - readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); - /** CreateReadSessionRequest format */ - format?: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat|null); + /** + * Creates a new TableModifiers instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - /** CreateReadSessionRequest shardingStrategy */ - shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); - } + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - /** Represents a CreateReadSessionRequest. */ - class CreateReadSessionRequest implements ICreateReadSessionRequest { + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - /** - * Constructs a new CreateReadSessionRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest); + /** + * Decodes a TableModifiers message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - /** CreateReadSessionRequest tableReference. */ - public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - /** CreateReadSessionRequest parent. */ - public parent: string; + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); - /** CreateReadSessionRequest tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - /** CreateReadSessionRequest requestedStreams. */ - public requestedStreams: number; + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. 
+ * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** CreateReadSessionRequest readOptions. */ - public readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + /** + * Converts this TableModifiers to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } - /** CreateReadSessionRequest format. */ - public format: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat); + /** Properties of a TableReadOptions. */ + interface ITableReadOptions { - /** CreateReadSessionRequest shardingStrategy. */ - public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); + + /** TableReadOptions arrowSerializationOptions */ + arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + } + + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { + + /** + * Constructs a new TableReadOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions); + + /** TableReadOptions selectedFields. */ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. */ + public rowRestriction: string; + + /** TableReadOptions arrowSerializationOptions. */ + public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + + /** TableReadOptions outputFormatSerializationOptions. 
*/ + public outputFormatSerializationOptions?: "arrowSerializationOptions"; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReadOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Verifies a TableReadOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReadOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * @param message TableReadOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReadOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of a ReadStream. */ + interface IReadStream { + + /** ReadStream name */ + name?: (string|null); + } + + /** Represents a ReadStream. */ + class ReadStream implements IReadStream { /** - * Creates a new CreateReadSessionRequest instance using the specified properties. + * Constructs a new ReadStream. 
* @param [properties] Properties to set - * @returns CreateReadSessionRequest instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + constructor(properties?: google.cloud.bigquery.storage.v1.IReadStream); + + /** ReadStream name. */ + public name: string; /** - * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. - * @param message CreateReadSessionRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer + * Creates a new ReadStream instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadStream instance */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static create(properties?: google.cloud.bigquery.storage.v1.IReadStream): google.cloud.bigquery.storage.v1.ReadStream; /** - * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. - * @param message CreateReadSessionRequest message or plain object to encode + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. 
+ * @param message ReadStream message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadStream message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns CreateReadSessionRequest + * @returns ReadStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadStream; /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * Decodes a ReadStream message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns CreateReadSessionRequest + * @returns ReadStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadStream; /** - * Verifies a CreateReadSessionRequest message. + * Verifies a ReadStream message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns CreateReadSessionRequest + * @returns ReadStream */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadStream; /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. - * @param message CreateReadSessionRequest + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. 
+ * @param message ReadStream * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this CreateReadSessionRequest to JSON. + * Converts this ReadStream to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** DataFormat enum. */ - enum DataFormat { - DATA_FORMAT_UNSPECIFIED = 0, - AVRO = 1, - ARROW = 3 - } + /** Properties of a WriteStream. */ + interface IWriteStream { - /** ShardingStrategy enum. */ - enum ShardingStrategy { - SHARDING_STRATEGY_UNSPECIFIED = 0, - LIQUID = 1, - BALANCED = 2 - } + /** WriteStream name */ + name?: (string|null); - /** Properties of a ReadRowsRequest. */ - interface IReadRowsRequest { + /** WriteStream type */ + type?: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type|null); - /** ReadRowsRequest readPosition */ - readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + /** WriteStream createTime */ + createTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream commitTime */ + commitTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream tableSchema */ + tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); } - /** Represents a ReadRowsRequest. */ - class ReadRowsRequest implements IReadRowsRequest { + /** Represents a WriteStream. */ + class WriteStream implements IWriteStream { /** - * Constructs a new ReadRowsRequest. + * Constructs a new WriteStream. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest); + constructor(properties?: google.cloud.bigquery.storage.v1.IWriteStream); - /** ReadRowsRequest readPosition. */ - public readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + /** WriteStream name. */ + public name: string; + + /** WriteStream type. */ + public type: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type); + + /** WriteStream createTime. */ + public createTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream commitTime. */ + public commitTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream tableSchema. */ + public tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); /** - * Creates a new ReadRowsRequest instance using the specified properties. + * Creates a new WriteStream instance using the specified properties. * @param [properties] Properties to set - * @returns ReadRowsRequest instance + * @returns WriteStream instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + public static create(properties?: google.cloud.bigquery.storage.v1.IWriteStream): google.cloud.bigquery.storage.v1.WriteStream; /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode + * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
+ * @param message WriteStream message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode + * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. + * @param message WriteStream message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. + * Decodes a WriteStream message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ReadRowsRequest + * @returns WriteStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.WriteStream; /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
+ * Decodes a WriteStream message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ReadRowsRequest + * @returns WriteStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.WriteStream; /** - * Verifies a ReadRowsRequest message. + * Verifies a WriteStream message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ReadRowsRequest + * @returns WriteStream */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.WriteStream; /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. - * @param message ReadRowsRequest + * Creates a plain object from a WriteStream message. Also converts values to other types if specified. 
+ * @param message WriteStream * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.WriteStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ReadRowsRequest to JSON. + * Converts this WriteStream to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a StreamStatus. */ - interface IStreamStatus { - - /** StreamStatus estimatedRowCount */ - estimatedRowCount?: (number|Long|string|null); + namespace WriteStream { - /** StreamStatus fractionConsumed */ - fractionConsumed?: (number|null); + /** Type enum. */ + enum Type { + TYPE_UNSPECIFIED = 0, + COMMITTED = 1, + PENDING = 2, + BUFFERED = 3 + } + } - /** StreamStatus progress */ - progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + /** Properties of a TableSchema. */ + interface ITableSchema { - /** StreamStatus isSplittable */ - isSplittable?: (boolean|null); + /** TableSchema fields */ + fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); } - /** Represents a StreamStatus. */ - class StreamStatus implements IStreamStatus { + /** Represents a TableSchema. */ + class TableSchema implements ITableSchema { /** - * Constructs a new StreamStatus. + * Constructs a new TableSchema. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus); - - /** StreamStatus estimatedRowCount. */ - public estimatedRowCount: (number|Long|string); - - /** StreamStatus fractionConsumed. */ - public fractionConsumed: number; + constructor(properties?: google.cloud.bigquery.storage.v1.ITableSchema); - /** StreamStatus progress. 
*/ - public progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); - - /** StreamStatus isSplittable. */ - public isSplittable: boolean; + /** TableSchema fields. */ + public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; /** - * Creates a new StreamStatus instance using the specified properties. + * Creates a new TableSchema instance using the specified properties. * @param [properties] Properties to set - * @returns StreamStatus instance + * @returns TableSchema instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus): google.cloud.bigquery.storage.v1beta1.StreamStatus; + public static create(properties?: google.cloud.bigquery.storage.v1.ITableSchema): google.cloud.bigquery.storage.v1.TableSchema; /** - * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. - * @param message StreamStatus message or plain object to encode + * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. + * @param message TableSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. - * @param message StreamStatus message or plain object to encode + * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
+ * @param message TableSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a StreamStatus message from the specified reader or buffer. + * Decodes a TableSchema message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns StreamStatus + * @returns TableSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamStatus; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableSchema; /** - * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * Decodes a TableSchema message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns StreamStatus + * @returns TableSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamStatus; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableSchema; /** - * Verifies a StreamStatus message. + * Verifies a TableSchema message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. + * Creates a TableSchema message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns StreamStatus + * @returns TableSchema */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamStatus; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableSchema; /** - * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. - * @param message StreamStatus + * Creates a plain object from a TableSchema message. Also converts values to other types if specified. + * @param message TableSchema * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.TableSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this StreamStatus to JSON. + * Converts this TableSchema to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a Progress. */ - interface IProgress { + /** Properties of a TableFieldSchema. 
*/ + interface ITableFieldSchema { - /** Progress atResponseStart */ - atResponseStart?: (number|null); + /** TableFieldSchema name */ + name?: (string|null); - /** Progress atResponseEnd */ - atResponseEnd?: (number|null); + /** TableFieldSchema type */ + type?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null); + + /** TableFieldSchema mode */ + mode?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null); + + /** TableFieldSchema fields */ + fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); + + /** TableFieldSchema description */ + description?: (string|null); + + /** TableFieldSchema maxLength */ + maxLength?: (number|Long|string|null); + + /** TableFieldSchema precision */ + precision?: (number|Long|string|null); + + /** TableFieldSchema scale */ + scale?: (number|Long|string|null); } - /** Represents a Progress. */ - class Progress implements IProgress { + /** Represents a TableFieldSchema. */ + class TableFieldSchema implements ITableFieldSchema { /** - * Constructs a new Progress. + * Constructs a new TableFieldSchema. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IProgress); + constructor(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema); - /** Progress atResponseStart. */ - public atResponseStart: number; + /** TableFieldSchema name. */ + public name: string; - /** Progress atResponseEnd. */ - public atResponseEnd: number; + /** TableFieldSchema type. */ + public type: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type); + + /** TableFieldSchema mode. */ + public mode: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode); + + /** TableFieldSchema fields. 
*/ + public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; + + /** TableFieldSchema description. */ + public description: string; + + /** TableFieldSchema maxLength. */ + public maxLength: (number|Long|string); + + /** TableFieldSchema precision. */ + public precision: (number|Long|string); + + /** TableFieldSchema scale. */ + public scale: (number|Long|string); /** - * Creates a new Progress instance using the specified properties. + * Creates a new TableFieldSchema instance using the specified properties. * @param [properties] Properties to set - * @returns Progress instance + * @returns TableFieldSchema instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IProgress): google.cloud.bigquery.storage.v1beta1.Progress; + public static create(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema): google.cloud.bigquery.storage.v1.TableFieldSchema; /** - * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. - * @param message Progress message or plain object to encode + * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. + * @param message TableFieldSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. - * @param message Progress message or plain object to encode + * Encodes the specified TableFieldSchema message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. + * @param message TableFieldSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a Progress message from the specified reader or buffer. + * Decodes a TableFieldSchema message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns Progress + * @returns TableFieldSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Progress; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableFieldSchema; /** - * Decodes a Progress message from the specified reader or buffer, length delimited. + * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns Progress + * @returns TableFieldSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Progress; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableFieldSchema; /** - * Verifies a Progress message. + * Verifies a TableFieldSchema message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns Progress + * @returns TableFieldSchema */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Progress; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableFieldSchema; /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. - * @param message Progress + * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified. + * @param message TableFieldSchema * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1.TableFieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this Progress to JSON. + * Converts this TableFieldSchema to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a ThrottleStatus. */ - interface IThrottleStatus { + namespace TableFieldSchema { + + /** Type enum. */ + enum Type { + TYPE_UNSPECIFIED = 0, + STRING = 1, + INT64 = 2, + DOUBLE = 3, + STRUCT = 4, + BYTES = 5, + BOOL = 6, + TIMESTAMP = 7, + DATE = 8, + TIME = 9, + DATETIME = 10, + GEOGRAPHY = 11, + NUMERIC = 12, + BIGNUMERIC = 13, + INTERVAL = 14, + JSON = 15 + } - /** ThrottleStatus throttlePercent */ - throttlePercent?: (number|null); + /** Mode enum. 
*/ + enum Mode { + MODE_UNSPECIFIED = 0, + NULLABLE = 1, + REQUIRED = 2, + REPEATED = 3 + } } + } - /** Represents a ThrottleStatus. */ - class ThrottleStatus implements IThrottleStatus { + /** Namespace v1beta1. */ + namespace v1beta1 { + + /** Properties of an ArrowSchema. */ + interface IArrowSchema { + + /** ArrowSchema serializedSchema */ + serializedSchema?: (Uint8Array|string|null); + } + + /** Represents an ArrowSchema. */ + class ArrowSchema implements IArrowSchema { /** - * Constructs a new ThrottleStatus. + * Constructs a new ArrowSchema. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); - /** ThrottleStatus throttlePercent. */ - public throttlePercent: number; + /** ArrowSchema serializedSchema. */ + public serializedSchema: (Uint8Array|string); /** - * Creates a new ThrottleStatus instance using the specified properties. + * Creates a new ArrowSchema instance using the specified properties. * @param [properties] Properties to set - * @returns ThrottleStatus instance + * @returns ArrowSchema instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema): google.cloud.bigquery.storage.v1beta1.ArrowSchema; /** - * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. - * @param message ThrottleStatus message or plain object to encode + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. 
+ * @param message ArrowSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. - * @param message ThrottleStatus message or plain object to encode + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ThrottleStatus message from the specified reader or buffer. + * Decodes an ArrowSchema message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ThrottleStatus + * @returns ArrowSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowSchema; /** - * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ThrottleStatus + * @returns ArrowSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowSchema; /** - * Verifies a ThrottleStatus message. + * Verifies an ArrowSchema message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns ThrottleStatus + * @returns ArrowSchema */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowSchema; /** - * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. - * @param message ThrottleStatus + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @param message ArrowSchema * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ThrottleStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ThrottleStatus to JSON. + * Converts this ArrowSchema to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a ReadRowsResponse. */ - interface IReadRowsResponse { - - /** ReadRowsResponse avroRows */ - avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + /** Properties of an ArrowRecordBatch. */ + interface IArrowRecordBatch { - /** ReadRowsResponse arrowRecordBatch */ - arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + /** ArrowRecordBatch serializedRecordBatch */ + serializedRecordBatch?: (Uint8Array|string|null); - /** ReadRowsResponse rowCount */ + /** ArrowRecordBatch rowCount */ rowCount?: (number|Long|string|null); - - /** ReadRowsResponse status */ - status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); - - /** ReadRowsResponse throttleStatus */ - throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); } - /** Represents a ReadRowsResponse. 
*/ - class ReadRowsResponse implements IReadRowsResponse { + /** Represents an ArrowRecordBatch. */ + class ArrowRecordBatch implements IArrowRecordBatch { /** - * Constructs a new ReadRowsResponse. + * Constructs a new ArrowRecordBatch. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse); - - /** ReadRowsResponse avroRows. */ - public avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); - /** ReadRowsResponse arrowRecordBatch. */ - public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + /** ArrowRecordBatch serializedRecordBatch. */ + public serializedRecordBatch: (Uint8Array|string); - /** ReadRowsResponse rowCount. */ + /** ArrowRecordBatch rowCount. */ public rowCount: (number|Long|string); - /** ReadRowsResponse status. */ - public status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); - - /** ReadRowsResponse throttleStatus. */ - public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); - - /** ReadRowsResponse rows. */ - public rows?: ("avroRows"|"arrowRecordBatch"); - /** - * Creates a new ReadRowsResponse instance using the specified properties. + * Creates a new ArrowRecordBatch instance using the specified properties. * @param [properties] Properties to set - * @returns ReadRowsResponse instance + * @returns ArrowRecordBatch instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
- * @param message ReadRowsResponse message or plain object to encode + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. - * @param message ReadRowsResponse message or plain object to encode + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. + * Decodes an ArrowRecordBatch message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ReadRowsResponse + * @returns ArrowRecordBatch * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ReadRowsResponse + * @returns ArrowRecordBatch * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; /** - * Verifies a ReadRowsResponse message. + * Verifies an ArrowRecordBatch message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns ReadRowsResponse + * @returns ArrowRecordBatch */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. - * @param message ReadRowsResponse + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * @param message ArrowRecordBatch * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ReadRowsResponse to JSON. + * Converts this ArrowRecordBatch to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a BatchCreateReadSessionStreamsRequest. */ - interface IBatchCreateReadSessionStreamsRequest { - - /** BatchCreateReadSessionStreamsRequest session */ - session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + /** Properties of an AvroSchema. */ + interface IAvroSchema { - /** BatchCreateReadSessionStreamsRequest requestedStreams */ - requestedStreams?: (number|null); + /** AvroSchema schema */ + schema?: (string|null); } - /** Represents a BatchCreateReadSessionStreamsRequest. */ - class BatchCreateReadSessionStreamsRequest implements IBatchCreateReadSessionStreamsRequest { + /** Represents an AvroSchema. */ + class AvroSchema implements IAvroSchema { /** - * Constructs a new BatchCreateReadSessionStreamsRequest. + * Constructs a new AvroSchema. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest); - - /** BatchCreateReadSessionStreamsRequest session. */ - public session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema); - /** BatchCreateReadSessionStreamsRequest requestedStreams. */ - public requestedStreams: number; + /** AvroSchema schema. */ + public schema: string; /** - * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * Creates a new AvroSchema instance using the specified properties. * @param [properties] Properties to set - * @returns BatchCreateReadSessionStreamsRequest instance + * @returns AvroSchema instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema): google.cloud.bigquery.storage.v1beta1.AvroSchema; /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. 
+ * @param message AvroSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. + * Decodes an AvroSchema message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns BatchCreateReadSessionStreamsRequest + * @returns AvroSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroSchema; /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns BatchCreateReadSessionStreamsRequest + * @returns AvroSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroSchema; /** - * Verifies a BatchCreateReadSessionStreamsRequest message. + * Verifies an AvroSchema message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns BatchCreateReadSessionStreamsRequest + * @returns AvroSchema */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroSchema; /** - * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. - * @param message BatchCreateReadSessionStreamsRequest + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @param message AvroSchema * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * Converts this AvroSchema to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a BatchCreateReadSessionStreamsResponse. */ - interface IBatchCreateReadSessionStreamsResponse { + /** Properties of an AvroRows. */ + interface IAvroRows { - /** BatchCreateReadSessionStreamsResponse streams */ - streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + /** AvroRows serializedBinaryRows */ + serializedBinaryRows?: (Uint8Array|string|null); + + /** AvroRows rowCount */ + rowCount?: (number|Long|string|null); } - /** Represents a BatchCreateReadSessionStreamsResponse. */ - class BatchCreateReadSessionStreamsResponse implements IBatchCreateReadSessionStreamsResponse { + /** Represents an AvroRows. */ + class AvroRows implements IAvroRows { /** - * Constructs a new BatchCreateReadSessionStreamsResponse. 
+ * Constructs a new AvroRows. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); - /** BatchCreateReadSessionStreamsResponse streams. */ - public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + /** AvroRows serializedBinaryRows. */ + public serializedBinaryRows: (Uint8Array|string); + + /** AvroRows rowCount. */ + public rowCount: (number|Long|string); /** - * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * Creates a new AvroRows instance using the specified properties. * @param [properties] Properties to set - * @returns BatchCreateReadSessionStreamsResponse instance + * @returns AvroRows instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows): google.cloud.bigquery.storage.v1beta1.AvroRows; /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
+ * @param message AvroRows message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. + * Decodes an AvroRows message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns BatchCreateReadSessionStreamsResponse + * @returns AvroRows * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroRows; /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. + * Decodes an AvroRows message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns BatchCreateReadSessionStreamsResponse + * @returns AvroRows * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroRows; /** - * Verifies a BatchCreateReadSessionStreamsResponse message. + * Verifies an AvroRows message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns BatchCreateReadSessionStreamsResponse + * @returns AvroRows */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroRows; /** - * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. - * @param message BatchCreateReadSessionStreamsResponse + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. + * @param message AvroRows * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * Converts this AvroRows to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a FinalizeStreamRequest. */ - interface IFinalizeStreamRequest { + /** Properties of a TableReadOptions. */ + interface ITableReadOptions { - /** FinalizeStreamRequest stream */ - stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); } - /** Represents a FinalizeStreamRequest. */ - class FinalizeStreamRequest implements IFinalizeStreamRequest { + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { /** - * Constructs a new FinalizeStreamRequest. + * Constructs a new TableReadOptions. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions); - /** FinalizeStreamRequest stream. */ - public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** TableReadOptions selectedFields. */ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. */ + public rowRestriction: string; /** - * Creates a new FinalizeStreamRequest instance using the specified properties. + * Creates a new TableReadOptions instance using the specified properties. * @param [properties] Properties to set - * @returns FinalizeStreamRequest instance + * @returns TableReadOptions instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions): google.cloud.bigquery.storage.v1beta1.TableReadOptions; /** - * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. - * @param message FinalizeStreamRequest message or plain object to encode + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FinalizeStreamRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. - * @param message FinalizeStreamRequest message or plain object to encode + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer. + * Decodes a TableReadOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FinalizeStreamRequest + * @returns TableReadOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReadOptions; /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns FinalizeStreamRequest + * @returns TableReadOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReadOptions; /** - * Verifies a FinalizeStreamRequest message. + * Verifies a TableReadOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns FinalizeStreamRequest + * @returns TableReadOptions */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReadOptions; /** - * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. - * @param message FinalizeStreamRequest + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @param message TableReadOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FinalizeStreamRequest to JSON. + * Converts this TableReadOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a SplitReadStreamRequest. */ - interface ISplitReadStreamRequest { + /** Represents a BigQueryStorage */ + class BigQueryStorage extends $protobuf.rpc.Service { - /** SplitReadStreamRequest originalStream */ - originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** + * Constructs a new BigQueryStorage service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); - /** SplitReadStreamRequest fraction */ - fraction?: (number|null); - } + /** + * Creates new BigQueryStorage service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryStorage; - /** Represents a SplitReadStreamRequest. */ - class SplitReadStreamRequest implements ISplitReadStreamRequest { + /** + * Calls CreateReadSession. 
+ * @param request CreateReadSessionRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadSession + */ + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback): void; /** - * Constructs a new SplitReadStreamRequest. - * @param [properties] Properties to set + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @returns Promise */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest); + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): Promise; - /** SplitReadStreamRequest originalStream. */ - public originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback): void; - /** SplitReadStreamRequest fraction. */ - public fraction: number; + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @returns Promise + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): Promise; /** - * Creates a new SplitReadStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns SplitReadStreamRequest instance + * Calls BatchCreateReadSessionStreams. 
+ * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback): void; /** - * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer + * Calls BatchCreateReadSessionStreams. + * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @returns Promise */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): Promise; /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer + * Calls FinalizeStream. 
+ * @param request FinalizeStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback): void; /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * Calls FinalizeStream. + * @param request FinalizeStreamRequest message or plain object + * @returns Promise */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): Promise; /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * Calls SplitReadStream. 
+ * @param request SplitReadStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback): void; /** - * Verifies a SplitReadStreamRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not + * Calls SplitReadStream. + * @param request SplitReadStreamRequest message or plain object + * @returns Promise */ - public static verify(message: { [k: string]: any }): (string|null); + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): Promise; + } + + namespace BigQueryStorage { /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SplitReadStreamRequest + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * @param error Error, if any + * @param [response] ReadSession */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. - * @param message SplitReadStreamRequest - * @param [options] Conversion options - * @returns Plain object + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. 
+ * @param error Error, if any + * @param [response] ReadRowsResponse */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; /** - * Converts this SplitReadStreamRequest to JSON. - * @returns JSON object + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. + * @param error Error, if any + * @param [response] BatchCreateReadSessionStreamsResponse */ - public toJSON(): { [k: string]: any }; - } + type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; - /** Properties of a SplitReadStreamResponse. */ - interface ISplitReadStreamResponse { + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. + * @param error Error, if any + * @param [response] Empty + */ + type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; - /** SplitReadStreamResponse primaryStream */ - primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * @param error Error, if any + * @param [response] SplitReadStreamResponse + */ + type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) => void; + } - /** SplitReadStreamResponse remainderStream */ - remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** Properties of a Stream. */ + interface IStream { + + /** Stream name */ + name?: (string|null); } - /** Represents a SplitReadStreamResponse. 
*/ - class SplitReadStreamResponse implements ISplitReadStreamResponse { + /** Represents a Stream. */ + class Stream implements IStream { /** - * Constructs a new SplitReadStreamResponse. + * Constructs a new Stream. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse); - - /** SplitReadStreamResponse primaryStream. */ - public primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStream); - /** SplitReadStreamResponse remainderStream. */ - public remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + /** Stream name. */ + public name: string; /** - * Creates a new SplitReadStreamResponse instance using the specified properties. + * Creates a new Stream instance using the specified properties. * @param [properties] Properties to set - * @returns SplitReadStreamResponse instance + * @returns Stream instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStream): google.cloud.bigquery.storage.v1beta1.Stream; /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. - * @param message SplitReadStreamResponse message or plain object to encode + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
+ * @param message Stream message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. - * @param message SplitReadStreamResponse message or plain object to encode + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @param message Stream message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * Decodes a Stream message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns SplitReadStreamResponse + * @returns Stream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Stream; /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * Decodes a Stream message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns SplitReadStreamResponse + * @returns Stream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Stream; /** - * Verifies a SplitReadStreamResponse message. + * Verifies a Stream message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * Creates a Stream message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns SplitReadStreamResponse + * @returns Stream */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Stream; /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. - * @param message SplitReadStreamResponse + * Creates a plain object from a Stream message. Also converts values to other types if specified. + * @param message Stream * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Stream, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this SplitReadStreamResponse to JSON. + * Converts this Stream to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a TableReference. */ - interface ITableReference { - - /** TableReference projectId */ - projectId?: (string|null); + /** Properties of a StreamPosition. */ + interface IStreamPosition { - /** TableReference datasetId */ - datasetId?: (string|null); + /** StreamPosition stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - /** TableReference tableId */ - tableId?: (string|null); + /** StreamPosition offset */ + offset?: (number|Long|string|null); } - /** Represents a TableReference. */ - class TableReference implements ITableReference { + /** Represents a StreamPosition. */ + class StreamPosition implements IStreamPosition { /** - * Constructs a new TableReference. + * Constructs a new StreamPosition. 
* @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference); - - /** TableReference projectId. */ - public projectId: string; + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition); - /** TableReference datasetId. */ - public datasetId: string; + /** StreamPosition stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - /** TableReference tableId. */ - public tableId: string; + /** StreamPosition offset. */ + public offset: (number|Long|string); /** - * Creates a new TableReference instance using the specified properties. + * Creates a new StreamPosition instance using the specified properties. * @param [properties] Properties to set - * @returns TableReference instance + * @returns StreamPosition instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference): google.cloud.bigquery.storage.v1beta1.TableReference; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition): google.cloud.bigquery.storage.v1beta1.StreamPosition; /** - * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. - * @param message TableReference message or plain object to encode + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @param message StreamPosition message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified TableReference message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. - * @param message TableReference message or plain object to encode + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @param message StreamPosition message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a TableReference message from the specified reader or buffer. + * Decodes a StreamPosition message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns TableReference + * @returns StreamPosition * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReference; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamPosition; /** - * Decodes a TableReference message from the specified reader or buffer, length delimited. + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns TableReference + * @returns StreamPosition * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReference; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamPosition; /** - * Verifies a TableReference message. + * Verifies a StreamPosition message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns TableReference + * @returns StreamPosition */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReference; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamPosition; /** - * Creates a plain object from a TableReference message. Also converts values to other types if specified. - * @param message TableReference + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. + * @param message StreamPosition * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamPosition, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this TableReference to JSON. 
+ * Converts this StreamPosition to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a TableModifiers. */ - interface ITableModifiers { + /** Properties of a ReadSession. */ + interface IReadSession { - /** TableModifiers snapshotTime */ - snapshotTime?: (google.protobuf.ITimestamp|null); + /** ReadSession name */ + name?: (string|null); + + /** ReadSession expireTime */ + expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + + /** ReadSession tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); } - /** Represents a TableModifiers. */ - class TableModifiers implements ITableModifiers { + /** Represents a ReadSession. */ + class ReadSession implements IReadSession { /** - * Constructs a new TableModifiers. + * Constructs a new ReadSession. * @param [properties] Properties to set */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers); + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession); - /** TableModifiers snapshotTime. */ - public snapshotTime?: (google.protobuf.ITimestamp|null); + /** ReadSession name. */ + public name: string; + + /** ReadSession expireTime. */ + public expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema. 
*/ + public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams. */ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** ReadSession tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy. */ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** ReadSession schema. */ + public schema?: ("avroSchema"|"arrowSchema"); /** - * Creates a new TableModifiers instance using the specified properties. + * Creates a new ReadSession instance using the specified properties. * @param [properties] Properties to set - * @returns TableModifiers instance + * @returns ReadSession instance */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers): google.cloud.bigquery.storage.v1beta1.TableModifiers; + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession): google.cloud.bigquery.storage.v1beta1.ReadSession; /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. 
+ * @param message ReadSession message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a TableModifiers message from the specified reader or buffer. + * Decodes a ReadSession message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns TableModifiers + * @returns ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableModifiers; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadSession; /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * Decodes a ReadSession message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns TableModifiers + * @returns ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableModifiers; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadSession; /** - * Verifies a TableModifiers message. + * Verifies a ReadSession message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns TableModifiers + * @returns ReadSession */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableModifiers; + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadSession; /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. - * @param message TableModifiers + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @param message ReadSession * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this TableModifiers to JSON. + * Converts this ReadSession to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - } - } - } - } - /** Namespace api. */ - namespace api { + /** Properties of a CreateReadSessionRequest. */ + interface ICreateReadSessionRequest { - /** Properties of a Http. */ - interface IHttp { + /** CreateReadSessionRequest tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); - /** Http rules */ - rules?: (google.api.IHttpRule[]|null); + /** CreateReadSessionRequest parent */ + parent?: (string|null); - /** Http fullyDecodeReservedExpansion */ - fullyDecodeReservedExpansion?: (boolean|null); - } + /** CreateReadSessionRequest tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); - /** Represents a Http. */ - class Http implements IHttp { + /** CreateReadSessionRequest requestedStreams */ + requestedStreams?: (number|null); - /** - * Constructs a new Http. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.api.IHttp); + /** CreateReadSessionRequest readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); - /** Http rules. */ - public rules: google.api.IHttpRule[]; + /** CreateReadSessionRequest format */ + format?: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat|null); - /** Http fullyDecodeReservedExpansion. */ - public fullyDecodeReservedExpansion: boolean; + /** CreateReadSessionRequest shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + } + + /** Represents a CreateReadSessionRequest. */ + class CreateReadSessionRequest implements ICreateReadSessionRequest { + + /** + * Constructs a new CreateReadSessionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest); + + /** CreateReadSessionRequest tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** CreateReadSessionRequest parent. */ + public parent: string; + + /** CreateReadSessionRequest tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** CreateReadSessionRequest requestedStreams. */ + public requestedStreams: number; + + /** CreateReadSessionRequest readOptions. */ + public readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + + /** CreateReadSessionRequest format. */ + public format: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat); + + /** CreateReadSessionRequest shardingStrategy. 
*/ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateReadSessionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Verifies a CreateReadSessionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateReadSessionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @param message CreateReadSessionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateReadSessionRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 3 + } + + /** ShardingStrategy enum. */ + enum ShardingStrategy { + SHARDING_STRATEGY_UNSPECIFIED = 0, + LIQUID = 1, + BALANCED = 2 + } + + /** Properties of a ReadRowsRequest. */ + interface IReadRowsRequest { + + /** ReadRowsRequest readPosition */ + readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + } + + /** Represents a ReadRowsRequest. */ + class ReadRowsRequest implements IReadRowsRequest { + + /** + * Constructs a new ReadRowsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest); + + /** ReadRowsRequest readPosition. */ + public readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
+ * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Verifies a ReadRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. 
+ * @param message ReadRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a StreamStatus. */ + interface IStreamStatus { + + /** StreamStatus estimatedRowCount */ + estimatedRowCount?: (number|Long|string|null); + + /** StreamStatus fractionConsumed */ + fractionConsumed?: (number|null); + + /** StreamStatus progress */ + progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable */ + isSplittable?: (boolean|null); + } + + /** Represents a StreamStatus. */ + class StreamStatus implements IStreamStatus { + + /** + * Constructs a new StreamStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus); + + /** StreamStatus estimatedRowCount. */ + public estimatedRowCount: (number|Long|string); + + /** StreamStatus fractionConsumed. */ + public fractionConsumed: number; + + /** StreamStatus progress. */ + public progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable. */ + public isSplittable: boolean; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Verifies a StreamStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns StreamStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. + * @param message StreamStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a Progress. */ + interface IProgress { + + /** Progress atResponseStart */ + atResponseStart?: (number|null); + + /** Progress atResponseEnd */ + atResponseEnd?: (number|null); + } + + /** Represents a Progress. */ + class Progress implements IProgress { + + /** + * Constructs a new Progress. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IProgress); + + /** Progress atResponseStart. */ + public atResponseStart: number; + + /** Progress atResponseEnd. */ + public atResponseEnd: number; + + /** + * Creates a new Progress instance using the specified properties. + * @param [properties] Properties to set + * @returns Progress instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IProgress): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
+ * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Verifies a Progress message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns Progress + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @param message Progress + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Progress to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ThrottleStatus. */ + interface IThrottleStatus { + + /** ThrottleStatus throttlePercent */ + throttlePercent?: (number|null); + } + + /** Represents a ThrottleStatus. */ + class ThrottleStatus implements IThrottleStatus { + + /** + * Constructs a new ThrottleStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus); + + /** ThrottleStatus throttlePercent. */ + public throttlePercent: number; + + /** + * Creates a new ThrottleStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns ThrottleStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ThrottleStatus message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Verifies a ThrottleStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ThrottleStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. 
+ * @param message ThrottleStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ThrottleStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ThrottleStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReadRowsResponse. */ + interface IReadRowsResponse { + + /** ReadRowsResponse avroRows */ + avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch */ + arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount */ + rowCount?: (number|Long|string|null); + + /** ReadRowsResponse status */ + status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus */ + throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + } + + /** Represents a ReadRowsResponse. */ + class ReadRowsResponse implements IReadRowsResponse { + + /** + * Constructs a new ReadRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse); + + /** ReadRowsResponse avroRows. */ + public avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch. */ + public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount. */ + public rowCount: (number|Long|string); + + /** ReadRowsResponse status. */ + public status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus. */ + public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + + /** ReadRowsResponse rows. 
*/ + public rows?: ("avroRows"|"arrowRecordBatch"); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Verifies a ReadRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @param message ReadRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a BatchCreateReadSessionStreamsRequest. */ + interface IBatchCreateReadSessionStreamsRequest { + + /** BatchCreateReadSessionStreamsRequest session */ + session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams */ + requestedStreams?: (number|null); + } + + /** Represents a BatchCreateReadSessionStreamsRequest. */ + class BatchCreateReadSessionStreamsRequest implements IBatchCreateReadSessionStreamsRequest { + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest); + + /** BatchCreateReadSessionStreamsRequest session. */ + public session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams. */ + public requestedStreams: number; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
+ * @param message BatchCreateReadSessionStreamsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a BatchCreateReadSessionStreamsResponse. */ + interface IBatchCreateReadSessionStreamsResponse { + + /** BatchCreateReadSessionStreamsResponse streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + } + + /** Represents a BatchCreateReadSessionStreamsResponse. */ + class BatchCreateReadSessionStreamsResponse implements IBatchCreateReadSessionStreamsResponse { + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse); + + /** BatchCreateReadSessionStreamsResponse streams. */ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
+ * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. + * @param message BatchCreateReadSessionStreamsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FinalizeStreamRequest. */ + interface IFinalizeStreamRequest { + + /** FinalizeStreamRequest stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a FinalizeStreamRequest. */ + class FinalizeStreamRequest implements IFinalizeStreamRequest { + + /** + * Constructs a new FinalizeStreamRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest); + + /** FinalizeStreamRequest stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns FinalizeStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Verifies a FinalizeStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FinalizeStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. + * @param message FinalizeStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FinalizeStreamRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamRequest. */ + interface ISplitReadStreamRequest { + + /** SplitReadStreamRequest originalStream */ + originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction */ + fraction?: (number|null); + } + + /** Represents a SplitReadStreamRequest. */ + class SplitReadStreamRequest implements ISplitReadStreamRequest { + + /** + * Constructs a new SplitReadStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest); + + /** SplitReadStreamRequest originalStream. */ + public originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction. */ + public fraction: number; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. 
+ * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Verifies a SplitReadStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
+ * @param message SplitReadStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a SplitReadStreamResponse. */ + interface ISplitReadStreamResponse { + + /** SplitReadStreamResponse primaryStream */ + primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream */ + remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a SplitReadStreamResponse. */ + class SplitReadStreamResponse implements ISplitReadStreamResponse { + + /** + * Constructs a new SplitReadStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse); + + /** SplitReadStreamResponse primaryStream. */ + public primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream. */ + public remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Verifies a SplitReadStreamResponse message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * @param message SplitReadStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableReference. */ + interface ITableReference { + + /** TableReference projectId */ + projectId?: (string|null); + + /** TableReference datasetId */ + datasetId?: (string|null); + + /** TableReference tableId */ + tableId?: (string|null); + } + + /** Represents a TableReference. */ + class TableReference implements ITableReference { + + /** + * Constructs a new TableReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference); + + /** TableReference projectId. */ + public projectId: string; + + /** TableReference datasetId. */ + public datasetId: string; + + /** TableReference tableId. */ + public tableId: string; + + /** + * Creates a new TableReference instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableReference instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReference message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Verifies a TableReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReference + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. + * @param message TableReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReference to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a TableModifiers. */ + interface ITableModifiers { + + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { + + /** + * Constructs a new TableModifiers. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers); + + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new TableModifiers instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableModifiers to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + } + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. */ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. */ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. 
*/ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileDescriptorProto. 
*/ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. 
*/ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. */ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a DescriptorProto. 
*/ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. */ + public name: string; + + /** DescriptorProto field. */ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. 
*/ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. 
*/ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. */ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. 
*/ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FieldDescriptorProto. 
*/ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label); + + /** FieldDescriptorProto type. */ + public type: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type); + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. 
*/ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. */ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace FieldDescriptorProto { + + /** Type enum. 
*/ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. */ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. */ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. 
+ * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of an EnumDescriptorProto. */ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. */ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. */ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. 
*/ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a ServiceDescriptorProto. 
*/ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. */ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. 
+ * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
+ * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MethodDescriptorProto. */ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. */ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a FileOptions. 
*/ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FileOptions .google.api.resourceDefinition */ + ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode); + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. */ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. */ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new Http instance using the specified properties. + * Creates a new FileOptions instance using the specified properties. 
* @param [properties] Properties to set - * @returns Http instance + * @returns FileOptions instance */ - public static create(properties?: google.api.IHttp): google.api.Http; + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; /** - * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @param message Http message or plain object to encode + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @param message Http message or plain object to encode + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a Http message from the specified reader or buffer. + * Decodes a FileOptions message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns Http + * @returns FileOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; /** - * Decodes a Http message from the specified reader or buffer, length delimited. + * Decodes a FileOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns Http + * @returns FileOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; /** - * Verifies a Http message. + * Verifies a FileOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a Http message from a plain object. Also converts values to their respective internal types. + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns Http + * @returns FileOptions */ - public static fromObject(object: { [k: string]: any }): google.api.Http; + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; /** - * Creates a plain object from a Http message. Also converts values to other types if specified. - * @param message Http + * Creates a plain object from a FileOptions message. 
Also converts values to other types if specified. + * @param message FileOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this Http to JSON. + * Converts this FileOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a HttpRule. */ - interface IHttpRule { - - /** HttpRule selector */ - selector?: (string|null); - - /** HttpRule get */ - get?: (string|null); + namespace FileOptions { - /** HttpRule put */ - put?: (string|null); + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } - /** HttpRule post */ - post?: (string|null); + /** Properties of a MessageOptions. */ + interface IMessageOptions { - /** HttpRule delete */ - "delete"?: (string|null); + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); - /** HttpRule patch */ - patch?: (string|null); + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); - /** HttpRule custom */ - custom?: (google.api.ICustomHttpPattern|null); + /** MessageOptions deprecated */ + deprecated?: (boolean|null); - /** HttpRule body */ - body?: (string|null); + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); - /** HttpRule responseBody */ - responseBody?: (string|null); + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - /** HttpRule additionalBindings */ - additionalBindings?: (google.api.IHttpRule[]|null); + /** MessageOptions .google.api.resource */ + ".google.api.resource"?: (google.api.IResourceDescriptor|null); } - /** Represents a HttpRule. 
*/ - class HttpRule implements IHttpRule { + /** Represents a MessageOptions. */ + class MessageOptions implements IMessageOptions { /** - * Constructs a new HttpRule. + * Constructs a new MessageOptions. * @param [properties] Properties to set */ - constructor(properties?: google.api.IHttpRule); - - /** HttpRule selector. */ - public selector: string; - - /** HttpRule get. */ - public get?: (string|null); - - /** HttpRule put. */ - public put?: (string|null); - - /** HttpRule post. */ - public post?: (string|null); - - /** HttpRule delete. */ - public delete?: (string|null); - - /** HttpRule patch. */ - public patch?: (string|null); + constructor(properties?: google.protobuf.IMessageOptions); - /** HttpRule custom. */ - public custom?: (google.api.ICustomHttpPattern|null); + /** MessageOptions messageSetWireFormat. */ + public messageSetWireFormat: boolean; - /** HttpRule body. */ - public body: string; + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; - /** HttpRule responseBody. */ - public responseBody: string; + /** MessageOptions deprecated. */ + public deprecated: boolean; - /** HttpRule additionalBindings. */ - public additionalBindings: google.api.IHttpRule[]; + /** MessageOptions mapEntry. */ + public mapEntry: boolean; - /** HttpRule pattern. */ - public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new HttpRule instance using the specified properties. + * Creates a new MessageOptions instance using the specified properties. * @param [properties] Properties to set - * @returns HttpRule instance + * @returns MessageOptions instance */ - public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; /** - * Encodes the specified HttpRule message. 
Does not implicitly {@link google.api.HttpRule.verify|verify} messages. - * @param message HttpRule message or plain object to encode + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. - * @param message HttpRule message or plain object to encode + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a HttpRule message from the specified reader or buffer. + * Decodes a MessageOptions message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns HttpRule + * @returns MessageOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; /** - * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns HttpRule + * @returns MessageOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; /** - * Verifies a HttpRule message. + * Verifies a MessageOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns HttpRule + * @returns MessageOptions */ - public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; /** - * Creates a plain object from a HttpRule message. Also converts values to other types if specified. 
- * @param message HttpRule + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this HttpRule to JSON. + * Converts this MessageOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a CustomHttpPattern. */ - interface ICustomHttpPattern { + /** Properties of a FieldOptions. */ + interface IFieldOptions { - /** CustomHttpPattern kind */ - kind?: (string|null); + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType|null); - /** CustomHttpPattern path */ - path?: (string|null); + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FieldOptions .google.api.fieldBehavior */ + ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); + + /** FieldOptions .google.api.resourceReference */ + ".google.api.resourceReference"?: (google.api.IResourceReference|null); } - /** Represents a CustomHttpPattern. */ - class CustomHttpPattern implements ICustomHttpPattern { + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { /** - * Constructs a new CustomHttpPattern. 
+ * Constructs a new FieldOptions. * @param [properties] Properties to set */ - constructor(properties?: google.api.ICustomHttpPattern); + constructor(properties?: google.protobuf.IFieldOptions); - /** CustomHttpPattern kind. */ - public kind: string; + /** FieldOptions ctype. */ + public ctype: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType); - /** CustomHttpPattern path. */ - public path: string; + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. */ + public jstype: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType); + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new CustomHttpPattern instance using the specified properties. + * Creates a new FieldOptions instance using the specified properties. * @param [properties] Properties to set - * @returns CustomHttpPattern instance + * @returns FieldOptions instance */ - public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; /** - * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. - * @param message CustomHttpPattern message or plain object to encode + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. 
+ * @param message FieldOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. - * @param message CustomHttpPattern message or plain object to encode + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a CustomHttpPattern message from the specified reader or buffer. + * Decodes a FieldOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns CustomHttpPattern + * @returns FieldOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; /** - * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns CustomHttpPattern + * @returns FieldOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; /** - * Verifies a CustomHttpPattern message. + * Verifies a FieldOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns CustomHttpPattern + * @returns FieldOptions */ - public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; /** - * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. - * @param message CustomHttpPattern + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this CustomHttpPattern to JSON. + * Converts this FieldOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** FieldBehavior enum. 
*/ - enum FieldBehavior { - FIELD_BEHAVIOR_UNSPECIFIED = 0, - OPTIONAL = 1, - REQUIRED = 2, - OUTPUT_ONLY = 3, - INPUT_ONLY = 4, - IMMUTABLE = 5, - UNORDERED_LIST = 6 - } - - /** Properties of a ResourceDescriptor. */ - interface IResourceDescriptor { - - /** ResourceDescriptor type */ - type?: (string|null); - - /** ResourceDescriptor pattern */ - pattern?: (string[]|null); - - /** ResourceDescriptor nameField */ - nameField?: (string|null); + namespace FieldOptions { - /** ResourceDescriptor history */ - history?: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History|null); + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } - /** ResourceDescriptor plural */ - plural?: (string|null); + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } - /** ResourceDescriptor singular */ - singular?: (string|null); + /** Properties of an OneofOptions. */ + interface IOneofOptions { - /** ResourceDescriptor style */ - style?: (google.api.ResourceDescriptor.Style[]|null); + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } - /** Represents a ResourceDescriptor. */ - class ResourceDescriptor implements IResourceDescriptor { + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { /** - * Constructs a new ResourceDescriptor. + * Constructs a new OneofOptions. * @param [properties] Properties to set */ - constructor(properties?: google.api.IResourceDescriptor); - - /** ResourceDescriptor type. */ - public type: string; - - /** ResourceDescriptor pattern. */ - public pattern: string[]; - - /** ResourceDescriptor nameField. */ - public nameField: string; - - /** ResourceDescriptor history. */ - public history: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History); - - /** ResourceDescriptor plural. 
*/ - public plural: string; - - /** ResourceDescriptor singular. */ - public singular: string; + constructor(properties?: google.protobuf.IOneofOptions); - /** ResourceDescriptor style. */ - public style: google.api.ResourceDescriptor.Style[]; + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new ResourceDescriptor instance using the specified properties. + * Creates a new OneofOptions instance using the specified properties. * @param [properties] Properties to set - * @returns ResourceDescriptor instance + * @returns OneofOptions instance */ - public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; /** - * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. - * @param message ResourceDescriptor message or plain object to encode + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. - * @param message ResourceDescriptor message or plain object to encode + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @param message OneofOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ResourceDescriptor message from the specified reader or buffer. + * Decodes an OneofOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ResourceDescriptor + * @returns OneofOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; /** - * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ResourceDescriptor + * @returns OneofOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; /** - * Verifies a ResourceDescriptor message. + * Verifies an OneofOptions message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ResourceDescriptor + * @returns OneofOptions */ - public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; /** - * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. - * @param message ResourceDescriptor + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * @param message OneofOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ResourceDescriptor to JSON. + * Converts this OneofOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace ResourceDescriptor { - - /** History enum. */ - enum History { - HISTORY_UNSPECIFIED = 0, - ORIGINALLY_SINGLE_PATTERN = 1, - FUTURE_MULTI_PATTERN = 2 - } - - /** Style enum. */ - enum Style { - STYLE_UNSPECIFIED = 0, - DECLARATIVE_FRIENDLY = 1 - } - } + /** Properties of an EnumOptions. */ + interface IEnumOptions { - /** Properties of a ResourceReference. 
*/ - interface IResourceReference { + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); - /** ResourceReference type */ - type?: (string|null); + /** EnumOptions deprecated */ + deprecated?: (boolean|null); - /** ResourceReference childType */ - childType?: (string|null); + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } - /** Represents a ResourceReference. */ - class ResourceReference implements IResourceReference { + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { /** - * Constructs a new ResourceReference. + * Constructs a new EnumOptions. * @param [properties] Properties to set */ - constructor(properties?: google.api.IResourceReference); + constructor(properties?: google.protobuf.IEnumOptions); - /** ResourceReference type. */ - public type: string; + /** EnumOptions allowAlias. */ + public allowAlias: boolean; - /** ResourceReference childType. */ - public childType: string; + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new ResourceReference instance using the specified properties. + * Creates a new EnumOptions instance using the specified properties. * @param [properties] Properties to set - * @returns ResourceReference instance + * @returns EnumOptions instance */ - public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; /** - * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. - * @param message ResourceReference message or plain object to encode + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @param message EnumOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. - * @param message ResourceReference message or plain object to encode + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ResourceReference message from the specified reader or buffer. + * Decodes an EnumOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ResourceReference + * @returns EnumOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; /** - * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns ResourceReference + * @returns EnumOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; /** - * Verifies a ResourceReference message. + * Verifies an EnumOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ResourceReference + * @returns EnumOptions */ - public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; /** - * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. - * @param message ResourceReference + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ResourceReference to JSON. + * Converts this EnumOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - } - /** Namespace protobuf. 
*/ - namespace protobuf { + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { - /** Properties of a FileDescriptorSet. */ - interface IFileDescriptorSet { + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); - /** FileDescriptorSet file */ - file?: (google.protobuf.IFileDescriptorProto[]|null); + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } - /** Represents a FileDescriptorSet. */ - class FileDescriptorSet implements IFileDescriptorSet { + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { /** - * Constructs a new FileDescriptorSet. + * Constructs a new EnumValueOptions. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IFileDescriptorSet); + constructor(properties?: google.protobuf.IEnumValueOptions); - /** FileDescriptorSet file. */ - public file: google.protobuf.IFileDescriptorProto[]; + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new FileDescriptorSet instance using the specified properties. + * Creates a new EnumValueOptions instance using the specified properties. * @param [properties] Properties to set - * @returns FileDescriptorSet instance + * @returns EnumValueOptions instance */ - public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; /** - * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. - * @param message FileDescriptorSet message or plain object to encode + * Encodes the specified EnumValueOptions message. 
Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. - * @param message FileDescriptorSet message or plain object to encode + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FileDescriptorSet message from the specified reader or buffer. + * Decodes an EnumValueOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FileDescriptorSet + * @returns EnumValueOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; /** - * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. 
+ * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns FileDescriptorSet + * @returns EnumValueOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; /** - * Verifies a FileDescriptorSet message. + * Verifies an EnumValueOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns FileDescriptorSet + * @returns EnumValueOptions */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; /** - * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. - * @param message FileDescriptorSet + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. 
+ * @param message EnumValueOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FileDescriptorSet to JSON. + * Converts this EnumValueOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a FileDescriptorProto. */ - interface IFileDescriptorProto { - - /** FileDescriptorProto name */ - name?: (string|null); - - /** FileDescriptorProto package */ - "package"?: (string|null); - - /** FileDescriptorProto dependency */ - dependency?: (string[]|null); - - /** FileDescriptorProto publicDependency */ - publicDependency?: (number[]|null); - - /** FileDescriptorProto weakDependency */ - weakDependency?: (number[]|null); - - /** FileDescriptorProto messageType */ - messageType?: (google.protobuf.IDescriptorProto[]|null); - - /** FileDescriptorProto enumType */ - enumType?: (google.protobuf.IEnumDescriptorProto[]|null); - - /** FileDescriptorProto service */ - service?: (google.protobuf.IServiceDescriptorProto[]|null); + /** Properties of a ServiceOptions. 
*/ + interface IServiceOptions { - /** FileDescriptorProto extension */ - extension?: (google.protobuf.IFieldDescriptorProto[]|null); + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); - /** FileDescriptorProto options */ - options?: (google.protobuf.IFileOptions|null); + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - /** FileDescriptorProto sourceCodeInfo */ - sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); - /** FileDescriptorProto syntax */ - syntax?: (string|null); + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); } - /** Represents a FileDescriptorProto. */ - class FileDescriptorProto implements IFileDescriptorProto { + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { /** - * Constructs a new FileDescriptorProto. + * Constructs a new ServiceOptions. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IFileDescriptorProto); - - /** FileDescriptorProto name. */ - public name: string; - - /** FileDescriptorProto package. */ - public package: string; - - /** FileDescriptorProto dependency. */ - public dependency: string[]; - - /** FileDescriptorProto publicDependency. */ - public publicDependency: number[]; - - /** FileDescriptorProto weakDependency. */ - public weakDependency: number[]; - - /** FileDescriptorProto messageType. */ - public messageType: google.protobuf.IDescriptorProto[]; - - /** FileDescriptorProto enumType. */ - public enumType: google.protobuf.IEnumDescriptorProto[]; - - /** FileDescriptorProto service. */ - public service: google.protobuf.IServiceDescriptorProto[]; - - /** FileDescriptorProto extension. */ - public extension: google.protobuf.IFieldDescriptorProto[]; - - /** FileDescriptorProto options. 
*/ - public options?: (google.protobuf.IFileOptions|null); + constructor(properties?: google.protobuf.IServiceOptions); - /** FileDescriptorProto sourceCodeInfo. */ - public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + /** ServiceOptions deprecated. */ + public deprecated: boolean; - /** FileDescriptorProto syntax. */ - public syntax: string; + /** ServiceOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new FileDescriptorProto instance using the specified properties. + * Creates a new ServiceOptions instance using the specified properties. * @param [properties] Properties to set - * @returns FileDescriptorProto instance + * @returns ServiceOptions instance */ - public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; /** - * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @param message FileDescriptorProto message or plain object to encode + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @param message FileDescriptorProto message or plain object to encode + * Encodes the specified ServiceOptions message, length delimited. 
Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FileDescriptorProto message from the specified reader or buffer. + * Decodes a ServiceOptions message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FileDescriptorProto + * @returns ServiceOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; /** - * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns FileDescriptorProto + * @returns ServiceOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; /** - * Verifies a FileDescriptorProto message. + * Verifies a ServiceOptions message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns FileDescriptorProto + * @returns ServiceOptions */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; /** - * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. - * @param message FileDescriptorProto + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * @param message ServiceOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FileDescriptorProto to JSON. + * Converts this ServiceOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a DescriptorProto. 
*/ - interface IDescriptorProto { - - /** DescriptorProto name */ - name?: (string|null); - - /** DescriptorProto field */ - field?: (google.protobuf.IFieldDescriptorProto[]|null); - - /** DescriptorProto extension */ - extension?: (google.protobuf.IFieldDescriptorProto[]|null); - - /** DescriptorProto nestedType */ - nestedType?: (google.protobuf.IDescriptorProto[]|null); - - /** DescriptorProto enumType */ - enumType?: (google.protobuf.IEnumDescriptorProto[]|null); - - /** DescriptorProto extensionRange */ - extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); - - /** DescriptorProto oneofDecl */ - oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); - - /** DescriptorProto options */ - options?: (google.protobuf.IMessageOptions|null); - - /** DescriptorProto reservedRange */ - reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); - - /** DescriptorProto reservedName */ - reservedName?: (string[]|null); - } - - /** Represents a DescriptorProto. */ - class DescriptorProto implements IDescriptorProto { - - /** - * Constructs a new DescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IDescriptorProto); + /** Properties of a MethodOptions. */ + interface IMethodOptions { - /** DescriptorProto name. */ - public name: string; + /** MethodOptions deprecated */ + deprecated?: (boolean|null); - /** DescriptorProto field. */ - public field: google.protobuf.IFieldDescriptorProto[]; + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); - /** DescriptorProto extension. */ - public extension: google.protobuf.IFieldDescriptorProto[]; + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - /** DescriptorProto nestedType. 
*/ - public nestedType: google.protobuf.IDescriptorProto[]; + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); - /** DescriptorProto enumType. */ - public enumType: google.protobuf.IEnumDescriptorProto[]; + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } - /** DescriptorProto extensionRange. */ - public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { - /** DescriptorProto oneofDecl. */ - public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + /** + * Constructs a new MethodOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); - /** DescriptorProto options. */ - public options?: (google.protobuf.IMessageOptions|null); + /** MethodOptions deprecated. */ + public deprecated: boolean; - /** DescriptorProto reservedRange. */ - public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); - /** DescriptorProto reservedName. */ - public reservedName: string[]; + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; /** - * Creates a new DescriptorProto instance using the specified properties. + * Creates a new MethodOptions instance using the specified properties. * @param [properties] Properties to set - * @returns DescriptorProto instance + * @returns MethodOptions instance */ - public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; /** - * Encodes the specified DescriptorProto message. 
Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. - * @param message DescriptorProto message or plain object to encode + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. - * @param message DescriptorProto message or plain object to encode + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a DescriptorProto message from the specified reader or buffer. + * Decodes a MethodOptions message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns DescriptorProto + * @returns MethodOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; /** - * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns DescriptorProto + * @returns MethodOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; /** - * Verifies a DescriptorProto message. + * Verifies a MethodOptions message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns DescriptorProto + * @returns MethodOptions */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; /** - * Creates a plain object from a DescriptorProto message. 
Also converts values to other types if specified. - * @param message DescriptorProto + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this DescriptorProto to JSON. + * Converts this MethodOptions to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace DescriptorProto { - - /** Properties of an ExtensionRange. */ - interface IExtensionRange { - - /** ExtensionRange start */ - start?: (number|null); - - /** ExtensionRange end */ - end?: (number|null); - - /** ExtensionRange options */ - options?: (google.protobuf.IExtensionRangeOptions|null); - } - - /** Represents an ExtensionRange. */ - class ExtensionRange implements IExtensionRange { - - /** - * Constructs a new ExtensionRange. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); - - /** ExtensionRange start. */ - public start: number; - - /** ExtensionRange end. */ - public end: number; - - /** ExtensionRange options. */ - public options?: (google.protobuf.IExtensionRangeOptions|null); - - /** - * Creates a new ExtensionRange instance using the specified properties. - * @param [properties] Properties to set - * @returns ExtensionRange instance - */ - public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
- * @param message ExtensionRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. - * @param message ExtensionRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Verifies an ExtensionRange message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ExtensionRange - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. - * @param message ExtensionRange - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ExtensionRange to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } - - /** Properties of a ReservedRange. */ - interface IReservedRange { - - /** ReservedRange start */ - start?: (number|null); + namespace MethodOptions { - /** ReservedRange end */ - end?: (number|null); + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 } + } - /** Represents a ReservedRange. */ - class ReservedRange implements IReservedRange { - - /** - * Constructs a new ReservedRange. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); - - /** ReservedRange start. */ - public start: number; - - /** ReservedRange end. */ - public end: number; - - /** - * Creates a new ReservedRange instance using the specified properties. - * @param [properties] Properties to set - * @returns ReservedRange instance - */ - public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; - - /** - * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
- * @param message ReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. - * @param message ReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReservedRange message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { - /** - * Decodes a ReservedRange message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); - /** - * Verifies a ReservedRange message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); - /** - * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReservedRange - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|string|null); - /** - * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. - * @param message ReservedRange - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|string|null); - /** - * Converts this ReservedRange to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } - } + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); - /** Properties of an ExtensionRangeOptions. */ - interface IExtensionRangeOptions { + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|string|null); - /** ExtensionRangeOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); } - /** Represents an ExtensionRangeOptions. */ - class ExtensionRangeOptions implements IExtensionRangeOptions { + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { /** - * Constructs a new ExtensionRangeOptions. + * Constructs a new UninterpretedOption. 
* @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IExtensionRangeOptions); + constructor(properties?: google.protobuf.IUninterpretedOption); - /** ExtensionRangeOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long|string); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long|string); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. */ + public stringValue: (Uint8Array|string); + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; /** - * Creates a new ExtensionRangeOptions instance using the specified properties. + * Creates a new UninterpretedOption instance using the specified properties. * @param [properties] Properties to set - * @returns ExtensionRangeOptions instance + * @returns UninterpretedOption instance */ - public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; /** - * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. - * @param message ExtensionRangeOptions message or plain object to encode + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
+ * @param message UninterpretedOption message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. - * @param message ExtensionRangeOptions message or plain object to encode + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * Decodes an UninterpretedOption message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ExtensionRangeOptions + * @returns UninterpretedOption * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ExtensionRangeOptions + * @returns UninterpretedOption * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; /** - * Verifies an ExtensionRangeOptions message. + * Verifies an UninterpretedOption message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ExtensionRangeOptions + * @returns UninterpretedOption */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; /** - * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. - * @param message ExtensionRangeOptions + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. 
+ * @param message UninterpretedOption * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ExtensionRangeOptions to JSON. + * Converts this UninterpretedOption to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a FieldDescriptorProto. */ - interface IFieldDescriptorProto { - - /** FieldDescriptorProto name */ - name?: (string|null); - - /** FieldDescriptorProto number */ - number?: (number|null); - - /** FieldDescriptorProto label */ - label?: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label|null); + namespace UninterpretedOption { - /** FieldDescriptorProto type */ - type?: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type|null); + /** Properties of a NamePart. */ + interface INamePart { - /** FieldDescriptorProto typeName */ - typeName?: (string|null); + /** NamePart namePart */ + namePart: string; - /** FieldDescriptorProto extendee */ - extendee?: (string|null); + /** NamePart isExtension */ + isExtension: boolean; + } - /** FieldDescriptorProto defaultValue */ - defaultValue?: (string|null); + /** Represents a NamePart. */ + class NamePart implements INamePart { - /** FieldDescriptorProto oneofIndex */ - oneofIndex?: (number|null); + /** + * Constructs a new NamePart. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); - /** FieldDescriptorProto jsonName */ - jsonName?: (string|null); + /** NamePart namePart. 
*/ + public namePart: string; - /** FieldDescriptorProto options */ - options?: (google.protobuf.IFieldOptions|null); + /** NamePart isExtension. */ + public isExtension: boolean; - /** FieldDescriptorProto proto3Optional */ - proto3Optional?: (boolean|null); - } + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; - /** Represents a FieldDescriptorProto. */ - class FieldDescriptorProto implements IFieldDescriptorProto { + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - /** - * Constructs a new FieldDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFieldDescriptorProto); + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - /** FieldDescriptorProto name. */ - public name: string; + /** + * Decodes a NamePart message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; - /** FieldDescriptorProto number. */ - public number: number; + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; - /** FieldDescriptorProto label. */ - public label: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label); + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); - /** FieldDescriptorProto type. */ - public type: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type); + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; - /** FieldDescriptorProto typeName. */ - public typeName: string; + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. 
+ * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** FieldDescriptorProto extendee. */ - public extendee: string; + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } - /** FieldDescriptorProto defaultValue. */ - public defaultValue: string; + /** Properties of a SourceCodeInfo. */ + interface ISourceCodeInfo { - /** FieldDescriptorProto oneofIndex. */ - public oneofIndex: number; + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } - /** FieldDescriptorProto jsonName. */ - public jsonName: string; + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { - /** FieldDescriptorProto options. */ - public options?: (google.protobuf.IFieldOptions|null); + /** + * Constructs a new SourceCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); - /** FieldDescriptorProto proto3Optional. */ - public proto3Optional: boolean; + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; /** - * Creates a new FieldDescriptorProto instance using the specified properties. + * Creates a new SourceCodeInfo instance using the specified properties. * @param [properties] Properties to set - * @returns FieldDescriptorProto instance + * @returns SourceCodeInfo instance */ - public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; /** - * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. 
- * @param message FieldDescriptorProto message or plain object to encode + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. - * @param message FieldDescriptorProto message or plain object to encode + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * Decodes a SourceCodeInfo message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FieldDescriptorProto + * @returns SourceCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns FieldDescriptorProto + * @returns SourceCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; /** - * Verifies a FieldDescriptorProto message. + * Verifies a SourceCodeInfo message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns FieldDescriptorProto + * @returns SourceCodeInfo */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; /** - * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. - * @param message FieldDescriptorProto + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FieldDescriptorProto to JSON. + * Converts this SourceCodeInfo to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace FieldDescriptorProto { + namespace SourceCodeInfo { - /** Type enum. */ - enum Type { - TYPE_DOUBLE = 1, - TYPE_FLOAT = 2, - TYPE_INT64 = 3, - TYPE_UINT64 = 4, - TYPE_INT32 = 5, - TYPE_FIXED64 = 6, - TYPE_FIXED32 = 7, - TYPE_BOOL = 8, - TYPE_STRING = 9, - TYPE_GROUP = 10, - TYPE_MESSAGE = 11, - TYPE_BYTES = 12, - TYPE_UINT32 = 13, - TYPE_ENUM = 14, - TYPE_SFIXED32 = 15, - TYPE_SFIXED64 = 16, - TYPE_SINT32 = 17, - TYPE_SINT64 = 18 - } + /** Properties of a Location. */ + interface ILocation { - /** Label enum. */ - enum Label { - LABEL_OPTIONAL = 1, - LABEL_REQUIRED = 2, - LABEL_REPEATED = 3 - } - } + /** Location path */ + path?: (number[]|null); - /** Properties of an OneofDescriptorProto. 
*/ - interface IOneofDescriptorProto { + /** Location span */ + span?: (number[]|null); - /** OneofDescriptorProto name */ - name?: (string|null); + /** Location leadingComments */ + leadingComments?: (string|null); - /** OneofDescriptorProto options */ - options?: (google.protobuf.IOneofOptions|null); - } + /** Location trailingComments */ + trailingComments?: (string|null); - /** Represents an OneofDescriptorProto. */ - class OneofDescriptorProto implements IOneofDescriptorProto { + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } - /** - * Constructs a new OneofDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IOneofDescriptorProto); + /** Represents a Location. */ + class Location implements ILocation { - /** OneofDescriptorProto name. */ - public name: string; + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); - /** OneofDescriptorProto options. */ - public options?: (google.protobuf.IOneofOptions|null); + /** Location path. */ + public path: number[]; - /** - * Creates a new OneofDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns OneofDescriptorProto instance - */ - public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + /** Location span. */ + public span: number[]; - /** - * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. - * @param message OneofDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + /** Location leadingComments. 
*/ + public leadingComments: string; - /** - * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. - * @param message OneofDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + /** Location trailingComments. */ + public trailingComments: string; - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; - /** - * Verifies an OneofDescriptorProto message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; - /** - * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns OneofDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; - /** - * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. - * @param message OneofDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + /** + * Decodes a Location message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; - /** - * Converts this OneofDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; - /** Properties of an EnumDescriptorProto. */ - interface IEnumDescriptorProto { + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); - /** EnumDescriptorProto name */ - name?: (string|null); + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; - /** EnumDescriptorProto value */ - value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. 
+ * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** EnumDescriptorProto options */ - options?: (google.protobuf.IEnumOptions|null); + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } - /** EnumDescriptorProto reservedRange */ - reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { - /** EnumDescriptorProto reservedName */ - reservedName?: (string[]|null); + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); } - /** Represents an EnumDescriptorProto. */ - class EnumDescriptorProto implements IEnumDescriptorProto { + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { /** - * Constructs a new EnumDescriptorProto. + * Constructs a new GeneratedCodeInfo. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IEnumDescriptorProto); - - /** EnumDescriptorProto name. */ - public name: string; - - /** EnumDescriptorProto value. */ - public value: google.protobuf.IEnumValueDescriptorProto[]; - - /** EnumDescriptorProto options. */ - public options?: (google.protobuf.IEnumOptions|null); - - /** EnumDescriptorProto reservedRange. */ - public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + constructor(properties?: google.protobuf.IGeneratedCodeInfo); - /** EnumDescriptorProto reservedName. */ - public reservedName: string[]; + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; /** - * Creates a new EnumDescriptorProto instance using the specified properties. 
+ * Creates a new GeneratedCodeInfo instance using the specified properties. * @param [properties] Properties to set - * @returns EnumDescriptorProto instance + * @returns GeneratedCodeInfo instance */ - public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; /** - * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. - * @param message EnumDescriptorProto message or plain object to encode + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. - * @param message EnumDescriptorProto message or plain object to encode + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer. 
+ * Decodes a GeneratedCodeInfo message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns EnumDescriptorProto + * @returns GeneratedCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns EnumDescriptorProto + * @returns GeneratedCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; /** - * Verifies an EnumDescriptorProto message. + * Verifies a GeneratedCodeInfo message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns EnumDescriptorProto + * @returns GeneratedCodeInfo */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; /** - * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. - * @param message EnumDescriptorProto + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @param message GeneratedCodeInfo * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this EnumDescriptorProto to JSON. + * Converts this GeneratedCodeInfo to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace EnumDescriptorProto { + namespace GeneratedCodeInfo { - /** Properties of an EnumReservedRange. */ - interface IEnumReservedRange { + /** Properties of an Annotation. */ + interface IAnnotation { - /** EnumReservedRange start */ - start?: (number|null); + /** Annotation path */ + path?: (number[]|null); - /** EnumReservedRange end */ + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ end?: (number|null); } - /** Represents an EnumReservedRange. */ - class EnumReservedRange implements IEnumReservedRange { + /** Represents an Annotation. */ + class Annotation implements IAnnotation { /** - * Constructs a new EnumReservedRange. + * Constructs a new Annotation. 
* @param [properties] Properties to set */ - constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); - /** EnumReservedRange start. */ - public start: number; + /** Annotation path. */ + public path: number[]; - /** EnumReservedRange end. */ + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ public end: number; /** - * Creates a new EnumReservedRange instance using the specified properties. + * Creates a new Annotation instance using the specified properties. * @param [properties] Properties to set - * @returns EnumReservedRange instance + * @returns Annotation instance */ - public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; /** - * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. - * @param message EnumReservedRange message or plain object to encode + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
- * @param message EnumReservedRange message or plain object to encode + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an EnumReservedRange message from the specified reader or buffer. + * Decodes an Annotation message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns EnumReservedRange + * @returns Annotation * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; /** - * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * Decodes an Annotation message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns EnumReservedRange + * @returns Annotation * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; /** - * Verifies an EnumReservedRange message. + * Verifies an Annotation message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns EnumReservedRange + * @returns Annotation */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; /** - * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. - * @param message EnumReservedRange + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @param message Annotation * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this EnumReservedRange to JSON. 
+ * Converts this Annotation to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } } - /** Properties of an EnumValueDescriptorProto. */ - interface IEnumValueDescriptorProto { - - /** EnumValueDescriptorProto name */ - name?: (string|null); + /** Properties of a Timestamp. */ + interface ITimestamp { - /** EnumValueDescriptorProto number */ - number?: (number|null); + /** Timestamp seconds */ + seconds?: (number|Long|string|null); - /** EnumValueDescriptorProto options */ - options?: (google.protobuf.IEnumValueOptions|null); + /** Timestamp nanos */ + nanos?: (number|null); } - /** Represents an EnumValueDescriptorProto. */ - class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + /** Represents a Timestamp. */ + class Timestamp implements ITimestamp { /** - * Constructs a new EnumValueDescriptorProto. + * Constructs a new Timestamp. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IEnumValueDescriptorProto); - - /** EnumValueDescriptorProto name. */ - public name: string; + constructor(properties?: google.protobuf.ITimestamp); - /** EnumValueDescriptorProto number. */ - public number: number; + /** Timestamp seconds. */ + public seconds: (number|Long|string); - /** EnumValueDescriptorProto options. */ - public options?: (google.protobuf.IEnumValueOptions|null); + /** Timestamp nanos. */ + public nanos: number; /** - * Creates a new EnumValueDescriptorProto instance using the specified properties. + * Creates a new Timestamp instance using the specified properties. * @param [properties] Properties to set - * @returns EnumValueDescriptorProto instance + * @returns Timestamp instance */ - public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; /** - * Encodes the specified EnumValueDescriptorProto message. 
Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. - * @param message EnumValueDescriptorProto message or plain object to encode + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. - * @param message EnumValueDescriptorProto message or plain object to encode + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * Decodes a Timestamp message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns EnumValueDescriptorProto + * @returns Timestamp * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a Timestamp message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns EnumValueDescriptorProto + * @returns Timestamp * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; /** - * Verifies an EnumValueDescriptorProto message. + * Verifies a Timestamp message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns EnumValueDescriptorProto + * @returns Timestamp */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; /** - * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. - * @param message EnumValueDescriptorProto + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @param message Timestamp * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this EnumValueDescriptorProto to JSON. + * Converts this Timestamp to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a ServiceDescriptorProto. */ - interface IServiceDescriptorProto { - - /** ServiceDescriptorProto name */ - name?: (string|null); - - /** ServiceDescriptorProto method */ - method?: (google.protobuf.IMethodDescriptorProto[]|null); + /** Properties of a DoubleValue. */ + interface IDoubleValue { - /** ServiceDescriptorProto options */ - options?: (google.protobuf.IServiceOptions|null); + /** DoubleValue value */ + value?: (number|null); } - /** Represents a ServiceDescriptorProto. */ - class ServiceDescriptorProto implements IServiceDescriptorProto { + /** Represents a DoubleValue. */ + class DoubleValue implements IDoubleValue { /** - * Constructs a new ServiceDescriptorProto. + * Constructs a new DoubleValue. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IServiceDescriptorProto); - - /** ServiceDescriptorProto name. 
*/ - public name: string; - - /** ServiceDescriptorProto method. */ - public method: google.protobuf.IMethodDescriptorProto[]; + constructor(properties?: google.protobuf.IDoubleValue); - /** ServiceDescriptorProto options. */ - public options?: (google.protobuf.IServiceOptions|null); + /** DoubleValue value. */ + public value: number; /** - * Creates a new ServiceDescriptorProto instance using the specified properties. + * Creates a new DoubleValue instance using the specified properties. * @param [properties] Properties to set - * @returns ServiceDescriptorProto instance + * @returns DoubleValue instance */ - public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + public static create(properties?: google.protobuf.IDoubleValue): google.protobuf.DoubleValue; /** - * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @param message ServiceDescriptorProto message or plain object to encode + * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @param message DoubleValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @param message ServiceDescriptorProto message or plain object to encode + * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. 
+ * @param message DoubleValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * Decodes a DoubleValue message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ServiceDescriptorProto + * @returns DoubleValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DoubleValue; /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a DoubleValue message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns ServiceDescriptorProto + * @returns DoubleValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DoubleValue; /** - * Verifies a ServiceDescriptorProto message. + * Verifies a DoubleValue message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ServiceDescriptorProto + * @returns DoubleValue */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.DoubleValue; /** - * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. - * @param message ServiceDescriptorProto + * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. + * @param message DoubleValue * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.DoubleValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ServiceDescriptorProto to JSON. + * Converts this DoubleValue to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a MethodDescriptorProto. */ - interface IMethodDescriptorProto { - - /** MethodDescriptorProto name */ - name?: (string|null); - - /** MethodDescriptorProto inputType */ - inputType?: (string|null); - - /** MethodDescriptorProto outputType */ - outputType?: (string|null); - - /** MethodDescriptorProto options */ - options?: (google.protobuf.IMethodOptions|null); + /** Properties of a FloatValue. 
*/ + interface IFloatValue { - /** MethodDescriptorProto clientStreaming */ - clientStreaming?: (boolean|null); - - /** MethodDescriptorProto serverStreaming */ - serverStreaming?: (boolean|null); + /** FloatValue value */ + value?: (number|null); } - /** Represents a MethodDescriptorProto. */ - class MethodDescriptorProto implements IMethodDescriptorProto { + /** Represents a FloatValue. */ + class FloatValue implements IFloatValue { /** - * Constructs a new MethodDescriptorProto. + * Constructs a new FloatValue. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IMethodDescriptorProto); - - /** MethodDescriptorProto name. */ - public name: string; - - /** MethodDescriptorProto inputType. */ - public inputType: string; - - /** MethodDescriptorProto outputType. */ - public outputType: string; - - /** MethodDescriptorProto options. */ - public options?: (google.protobuf.IMethodOptions|null); - - /** MethodDescriptorProto clientStreaming. */ - public clientStreaming: boolean; + constructor(properties?: google.protobuf.IFloatValue); - /** MethodDescriptorProto serverStreaming. */ - public serverStreaming: boolean; + /** FloatValue value. */ + public value: number; /** - * Creates a new MethodDescriptorProto instance using the specified properties. + * Creates a new FloatValue instance using the specified properties. * @param [properties] Properties to set - * @returns MethodDescriptorProto instance + * @returns FloatValue instance */ - public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + public static create(properties?: google.protobuf.IFloatValue): google.protobuf.FloatValue; /** - * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @param message MethodDescriptorProto message or plain object to encode + * Encodes the specified FloatValue message. 
Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @param message FloatValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @param message MethodDescriptorProto message or plain object to encode + * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @param message FloatValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * Decodes a FloatValue message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns MethodDescriptorProto + * @returns FloatValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FloatValue; /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. 
+ * Decodes a FloatValue message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns MethodDescriptorProto + * @returns FloatValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FloatValue; /** - * Verifies a MethodDescriptorProto message. + * Verifies a FloatValue message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns MethodDescriptorProto + * @returns FloatValue */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + public static fromObject(object: { [k: string]: any }): google.protobuf.FloatValue; /** - * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. - * @param message MethodDescriptorProto + * Creates a plain object from a FloatValue message. Also converts values to other types if specified. + * @param message FloatValue * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.FloatValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this MethodDescriptorProto to JSON. 
+ * Converts this FloatValue to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a FileOptions. */ - interface IFileOptions { - - /** FileOptions javaPackage */ - javaPackage?: (string|null); - - /** FileOptions javaOuterClassname */ - javaOuterClassname?: (string|null); - - /** FileOptions javaMultipleFiles */ - javaMultipleFiles?: (boolean|null); - - /** FileOptions javaGenerateEqualsAndHash */ - javaGenerateEqualsAndHash?: (boolean|null); - - /** FileOptions javaStringCheckUtf8 */ - javaStringCheckUtf8?: (boolean|null); - - /** FileOptions optimizeFor */ - optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode|null); - - /** FileOptions goPackage */ - goPackage?: (string|null); - - /** FileOptions ccGenericServices */ - ccGenericServices?: (boolean|null); - - /** FileOptions javaGenericServices */ - javaGenericServices?: (boolean|null); - - /** FileOptions pyGenericServices */ - pyGenericServices?: (boolean|null); - - /** FileOptions phpGenericServices */ - phpGenericServices?: (boolean|null); - - /** FileOptions deprecated */ - deprecated?: (boolean|null); - - /** FileOptions ccEnableArenas */ - ccEnableArenas?: (boolean|null); - - /** FileOptions objcClassPrefix */ - objcClassPrefix?: (string|null); - - /** FileOptions csharpNamespace */ - csharpNamespace?: (string|null); - - /** FileOptions swiftPrefix */ - swiftPrefix?: (string|null); - - /** FileOptions phpClassPrefix */ - phpClassPrefix?: (string|null); - - /** FileOptions phpNamespace */ - phpNamespace?: (string|null); - - /** FileOptions phpMetadataNamespace */ - phpMetadataNamespace?: (string|null); - - /** FileOptions rubyPackage */ - rubyPackage?: (string|null); - - /** FileOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** Properties of an Int64Value. 
*/ + interface IInt64Value { - /** FileOptions .google.api.resourceDefinition */ - ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); + /** Int64Value value */ + value?: (number|Long|string|null); } - /** Represents a FileOptions. */ - class FileOptions implements IFileOptions { + /** Represents an Int64Value. */ + class Int64Value implements IInt64Value { /** - * Constructs a new FileOptions. + * Constructs a new Int64Value. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IFileOptions); + constructor(properties?: google.protobuf.IInt64Value); - /** FileOptions javaPackage. */ - public javaPackage: string; - - /** FileOptions javaOuterClassname. */ - public javaOuterClassname: string; - - /** FileOptions javaMultipleFiles. */ - public javaMultipleFiles: boolean; - - /** FileOptions javaGenerateEqualsAndHash. */ - public javaGenerateEqualsAndHash: boolean; - - /** FileOptions javaStringCheckUtf8. */ - public javaStringCheckUtf8: boolean; - - /** FileOptions optimizeFor. */ - public optimizeFor: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode); - - /** FileOptions goPackage. */ - public goPackage: string; + /** Int64Value value. */ + public value: (number|Long|string); - /** FileOptions ccGenericServices. */ - public ccGenericServices: boolean; + /** + * Creates a new Int64Value instance using the specified properties. + * @param [properties] Properties to set + * @returns Int64Value instance + */ + public static create(properties?: google.protobuf.IInt64Value): google.protobuf.Int64Value; - /** FileOptions javaGenericServices. */ - public javaGenericServices: boolean; + /** + * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. 
+ * @param message Int64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - /** FileOptions pyGenericServices. */ - public pyGenericServices: boolean; + /** + * Encodes the specified Int64Value message, length delimited. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @param message Int64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - /** FileOptions phpGenericServices. */ - public phpGenericServices: boolean; + /** + * Decodes an Int64Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int64Value; - /** FileOptions deprecated. */ - public deprecated: boolean; + /** + * Decodes an Int64Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int64Value; - /** FileOptions ccEnableArenas. */ - public ccEnableArenas: boolean; + /** + * Verifies an Int64Value message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); - /** FileOptions objcClassPrefix. */ - public objcClassPrefix: string; + /** + * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Int64Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Int64Value; - /** FileOptions csharpNamespace. */ - public csharpNamespace: string; + /** + * Creates a plain object from an Int64Value message. Also converts values to other types if specified. + * @param message Int64Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Int64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** FileOptions swiftPrefix. */ - public swiftPrefix: string; + /** + * Converts this Int64Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } - /** FileOptions phpClassPrefix. */ - public phpClassPrefix: string; + /** Properties of a UInt64Value. */ + interface IUInt64Value { - /** FileOptions phpNamespace. */ - public phpNamespace: string; + /** UInt64Value value */ + value?: (number|Long|string|null); + } - /** FileOptions phpMetadataNamespace. */ - public phpMetadataNamespace: string; + /** Represents a UInt64Value. */ + class UInt64Value implements IUInt64Value { - /** FileOptions rubyPackage. */ - public rubyPackage: string; + /** + * Constructs a new UInt64Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUInt64Value); - /** FileOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** UInt64Value value. 
*/ + public value: (number|Long|string); /** - * Creates a new FileOptions instance using the specified properties. + * Creates a new UInt64Value instance using the specified properties. * @param [properties] Properties to set - * @returns FileOptions instance + * @returns UInt64Value instance */ - public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + public static create(properties?: google.protobuf.IUInt64Value): google.protobuf.UInt64Value; /** - * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @param message FileOptions message or plain object to encode + * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. + * @param message UInt64Value message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @param message FileOptions message or plain object to encode + * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. + * @param message UInt64Value message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FileOptions message from the specified reader or buffer. 
+ * Decodes a UInt64Value message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FileOptions + * @returns UInt64Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt64Value; /** - * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * Decodes a UInt64Value message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns FileOptions + * @returns UInt64Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt64Value; /** - * Verifies a FileOptions message. + * Verifies a UInt64Value message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns FileOptions + * @returns UInt64Value */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.UInt64Value; /** - * Creates a plain object from a FileOptions message. 
Also converts values to other types if specified. - * @param message FileOptions + * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. + * @param message UInt64Value * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.UInt64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FileOptions to JSON. + * Converts this UInt64Value to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace FileOptions { - - /** OptimizeMode enum. */ - enum OptimizeMode { - SPEED = 1, - CODE_SIZE = 2, - LITE_RUNTIME = 3 - } - } - - /** Properties of a MessageOptions. */ - interface IMessageOptions { - - /** MessageOptions messageSetWireFormat */ - messageSetWireFormat?: (boolean|null); - - /** MessageOptions noStandardDescriptorAccessor */ - noStandardDescriptorAccessor?: (boolean|null); - - /** MessageOptions deprecated */ - deprecated?: (boolean|null); - - /** MessageOptions mapEntry */ - mapEntry?: (boolean|null); - - /** MessageOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** Properties of an Int32Value. */ + interface IInt32Value { - /** MessageOptions .google.api.resource */ - ".google.api.resource"?: (google.api.IResourceDescriptor|null); + /** Int32Value value */ + value?: (number|null); } - /** Represents a MessageOptions. */ - class MessageOptions implements IMessageOptions { + /** Represents an Int32Value. */ + class Int32Value implements IInt32Value { /** - * Constructs a new MessageOptions. + * Constructs a new Int32Value. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IMessageOptions); - - /** MessageOptions messageSetWireFormat. 
*/ - public messageSetWireFormat: boolean; - - /** MessageOptions noStandardDescriptorAccessor. */ - public noStandardDescriptorAccessor: boolean; - - /** MessageOptions deprecated. */ - public deprecated: boolean; + constructor(properties?: google.protobuf.IInt32Value); - /** MessageOptions mapEntry. */ - public mapEntry: boolean; - - /** MessageOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** Int32Value value. */ + public value: number; /** - * Creates a new MessageOptions instance using the specified properties. + * Creates a new Int32Value instance using the specified properties. * @param [properties] Properties to set - * @returns MessageOptions instance + * @returns Int32Value instance */ - public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + public static create(properties?: google.protobuf.IInt32Value): google.protobuf.Int32Value; /** - * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. - * @param message MessageOptions message or plain object to encode + * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. + * @param message Int32Value message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. - * @param message MessageOptions message or plain object to encode + * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. 
+ * @param message Int32Value message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a MessageOptions message from the specified reader or buffer. + * Decodes an Int32Value message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns MessageOptions + * @returns Int32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int32Value; /** - * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * Decodes an Int32Value message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns MessageOptions + * @returns Int32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int32Value; /** - * Verifies a MessageOptions message. + * Verifies an Int32Value message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a MessageOptions message from a plain object. 
Also converts values to their respective internal types. + * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns MessageOptions + * @returns Int32Value */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.Int32Value; /** - * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. - * @param message MessageOptions + * Creates a plain object from an Int32Value message. Also converts values to other types if specified. + * @param message Int32Value * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.Int32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this MessageOptions to JSON. + * Converts this Int32Value to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a FieldOptions. */ - interface IFieldOptions { + /** Properties of a UInt32Value. */ + interface IUInt32Value { - /** FieldOptions ctype */ - ctype?: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType|null); + /** UInt32Value value */ + value?: (number|null); + } - /** FieldOptions packed */ - packed?: (boolean|null); + /** Represents a UInt32Value. */ + class UInt32Value implements IUInt32Value { - /** FieldOptions jstype */ - jstype?: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType|null); + /** + * Constructs a new UInt32Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUInt32Value); - /** FieldOptions lazy */ - lazy?: (boolean|null); + /** UInt32Value value. 
*/ + public value: number; - /** FieldOptions deprecated */ - deprecated?: (boolean|null); + /** + * Creates a new UInt32Value instance using the specified properties. + * @param [properties] Properties to set + * @returns UInt32Value instance + */ + public static create(properties?: google.protobuf.IUInt32Value): google.protobuf.UInt32Value; - /** FieldOptions weak */ - weak?: (boolean|null); + /** + * Encodes the specified UInt32Value message. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. + * @param message UInt32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - /** FieldOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** + * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. + * @param message UInt32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - /** FieldOptions .google.api.fieldBehavior */ - ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); + /** + * Decodes a UInt32Value message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt32Value; - /** FieldOptions .google.api.resourceReference */ - ".google.api.resourceReference"?: (google.api.IResourceReference|null); - } + /** + * Decodes a UInt32Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt32Value; - /** Represents a FieldOptions. */ - class FieldOptions implements IFieldOptions { + /** + * Verifies a UInt32Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); /** - * Constructs a new FieldOptions. - * @param [properties] Properties to set + * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UInt32Value */ - constructor(properties?: google.protobuf.IFieldOptions); + public static fromObject(object: { [k: string]: any }): google.protobuf.UInt32Value; - /** FieldOptions ctype. */ - public ctype: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType); + /** + * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. 
+ * @param message UInt32Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UInt32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - /** FieldOptions packed. */ - public packed: boolean; + /** + * Converts this UInt32Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } - /** FieldOptions jstype. */ - public jstype: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType); + /** Properties of a BoolValue. */ + interface IBoolValue { - /** FieldOptions lazy. */ - public lazy: boolean; + /** BoolValue value */ + value?: (boolean|null); + } - /** FieldOptions deprecated. */ - public deprecated: boolean; + /** Represents a BoolValue. */ + class BoolValue implements IBoolValue { - /** FieldOptions weak. */ - public weak: boolean; + /** + * Constructs a new BoolValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IBoolValue); - /** FieldOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** BoolValue value. */ + public value: boolean; /** - * Creates a new FieldOptions instance using the specified properties. + * Creates a new BoolValue instance using the specified properties. * @param [properties] Properties to set - * @returns FieldOptions instance + * @returns BoolValue instance */ - public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + public static create(properties?: google.protobuf.IBoolValue): google.protobuf.BoolValue; /** - * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. - * @param message FieldOptions message or plain object to encode + * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. 
+ * @param message BoolValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. - * @param message FieldOptions message or plain object to encode + * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. + * @param message BoolValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a FieldOptions message from the specified reader or buffer. + * Decodes a BoolValue message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns FieldOptions + * @returns BoolValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BoolValue; /** - * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * Decodes a BoolValue message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns FieldOptions + * @returns BoolValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BoolValue; /** - * Verifies a FieldOptions message. + * Verifies a BoolValue message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns FieldOptions + * @returns BoolValue */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.BoolValue; /** - * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. - * @param message FieldOptions + * Creates a plain object from a BoolValue message. Also converts values to other types if specified. + * @param message BoolValue * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.BoolValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this FieldOptions to JSON. + * Converts this BoolValue to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace FieldOptions { - - /** CType enum. 
*/ - enum CType { - STRING = 0, - CORD = 1, - STRING_PIECE = 2 - } - - /** JSType enum. */ - enum JSType { - JS_NORMAL = 0, - JS_STRING = 1, - JS_NUMBER = 2 - } - } - - /** Properties of an OneofOptions. */ - interface IOneofOptions { + /** Properties of a StringValue. */ + interface IStringValue { - /** OneofOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** StringValue value */ + value?: (string|null); } - /** Represents an OneofOptions. */ - class OneofOptions implements IOneofOptions { + /** Represents a StringValue. */ + class StringValue implements IStringValue { /** - * Constructs a new OneofOptions. + * Constructs a new StringValue. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IOneofOptions); - - /** OneofOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + constructor(properties?: google.protobuf.IStringValue); + + /** StringValue value. */ + public value: string; /** - * Creates a new OneofOptions instance using the specified properties. + * Creates a new StringValue instance using the specified properties. * @param [properties] Properties to set - * @returns OneofOptions instance + * @returns StringValue instance */ - public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + public static create(properties?: google.protobuf.IStringValue): google.protobuf.StringValue; /** - * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. - * @param message OneofOptions message or plain object to encode + * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. 
+ * @param message StringValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. - * @param message OneofOptions message or plain object to encode + * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. + * @param message StringValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an OneofOptions message from the specified reader or buffer. + * Decodes a StringValue message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns OneofOptions + * @returns StringValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.StringValue; /** - * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * Decodes a StringValue message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns OneofOptions + * @returns StringValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.StringValue; /** - * Verifies an OneofOptions message. + * Verifies a StringValue message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * Creates a StringValue message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns OneofOptions + * @returns StringValue */ - public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.StringValue; /** - * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. - * @param message OneofOptions + * Creates a plain object from a StringValue message. Also converts values to other types if specified. + * @param message StringValue * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.StringValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this OneofOptions to JSON. + * Converts this StringValue to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of an EnumOptions. 
*/ - interface IEnumOptions { - - /** EnumOptions allowAlias */ - allowAlias?: (boolean|null); - - /** EnumOptions deprecated */ - deprecated?: (boolean|null); + /** Properties of a BytesValue. */ + interface IBytesValue { - /** EnumOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** BytesValue value */ + value?: (Uint8Array|string|null); } - /** Represents an EnumOptions. */ - class EnumOptions implements IEnumOptions { + /** Represents a BytesValue. */ + class BytesValue implements IBytesValue { /** - * Constructs a new EnumOptions. + * Constructs a new BytesValue. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IEnumOptions); - - /** EnumOptions allowAlias. */ - public allowAlias: boolean; - - /** EnumOptions deprecated. */ - public deprecated: boolean; + constructor(properties?: google.protobuf.IBytesValue); - /** EnumOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** BytesValue value. */ + public value: (Uint8Array|string); /** - * Creates a new EnumOptions instance using the specified properties. + * Creates a new BytesValue instance using the specified properties. * @param [properties] Properties to set - * @returns EnumOptions instance + * @returns BytesValue instance */ - public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + public static create(properties?: google.protobuf.IBytesValue): google.protobuf.BytesValue; /** - * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. - * @param message EnumOptions message or plain object to encode + * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. 
+ * @param message BytesValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. - * @param message EnumOptions message or plain object to encode + * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. + * @param message BytesValue message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an EnumOptions message from the specified reader or buffer. + * Decodes a BytesValue message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns EnumOptions + * @returns BytesValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BytesValue; /** - * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * Decodes a BytesValue message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns EnumOptions + * @returns BytesValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BytesValue; /** - * Verifies an EnumOptions message. + * Verifies a BytesValue message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. + * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns EnumOptions + * @returns BytesValue */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.BytesValue; /** - * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. - * @param message EnumOptions + * Creates a plain object from a BytesValue message. Also converts values to other types if specified. + * @param message BytesValue * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.BytesValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this EnumOptions to JSON. + * Converts this BytesValue to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of an EnumValueOptions. 
*/ - interface IEnumValueOptions { + /** Properties of an Any. */ + interface IAny { - /** EnumValueOptions deprecated */ - deprecated?: (boolean|null); + /** Any type_url */ + type_url?: (string|null); - /** EnumValueOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** Any value */ + value?: (Uint8Array|string|null); } - /** Represents an EnumValueOptions. */ - class EnumValueOptions implements IEnumValueOptions { + /** Represents an Any. */ + class Any implements IAny { /** - * Constructs a new EnumValueOptions. + * Constructs a new Any. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IEnumValueOptions); + constructor(properties?: google.protobuf.IAny); - /** EnumValueOptions deprecated. */ - public deprecated: boolean; + /** Any type_url. */ + public type_url: string; - /** EnumValueOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** Any value. */ + public value: (Uint8Array|string); /** - * Creates a new EnumValueOptions instance using the specified properties. + * Creates a new Any instance using the specified properties. * @param [properties] Properties to set - * @returns EnumValueOptions instance + * @returns Any instance */ - public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + public static create(properties?: google.protobuf.IAny): google.protobuf.Any; /** - * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. - * @param message EnumValueOptions message or plain object to encode + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. 
+ * @param message Any message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. - * @param message EnumValueOptions message or plain object to encode + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an EnumValueOptions message from the specified reader or buffer. + * Decodes an Any message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns EnumValueOptions + * @returns Any * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; /** - * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * Decodes an Any message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns EnumValueOptions + * @returns Any * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; /** - * Verifies an EnumValueOptions message. + * Verifies an Any message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. + * Creates an Any message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns EnumValueOptions + * @returns Any */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.Any; /** - * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. - * @param message EnumValueOptions + * Creates a plain object from an Any message. Also converts values to other types if specified. + * @param message Any * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this EnumValueOptions to JSON. + * Converts this Any to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - /** Properties of a ServiceOptions. 
*/ - interface IServiceOptions { - - /** ServiceOptions deprecated */ - deprecated?: (boolean|null); - - /** ServiceOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** ServiceOptions .google.api.defaultHost */ - ".google.api.defaultHost"?: (string|null); - - /** ServiceOptions .google.api.oauthScopes */ - ".google.api.oauthScopes"?: (string|null); + /** Properties of an Empty. */ + interface IEmpty { } - /** Represents a ServiceOptions. */ - class ServiceOptions implements IServiceOptions { + /** Represents an Empty. */ + class Empty implements IEmpty { /** - * Constructs a new ServiceOptions. + * Constructs a new Empty. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IServiceOptions); - - /** ServiceOptions deprecated. */ - public deprecated: boolean; - - /** ServiceOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + constructor(properties?: google.protobuf.IEmpty); /** - * Creates a new ServiceOptions instance using the specified properties. + * Creates a new Empty instance using the specified properties. * @param [properties] Properties to set - * @returns ServiceOptions instance + * @returns Empty instance */ - public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; /** - * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. - * @param message ServiceOptions message or plain object to encode + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. 
+ * @param message Empty message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. - * @param message ServiceOptions message or plain object to encode + * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a ServiceOptions message from the specified reader or buffer. + * Decodes an Empty message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns ServiceOptions + * @returns Empty * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; /** - * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * Decodes an Empty message from the specified reader or buffer, length delimited. 
* @param reader Reader or buffer to decode from - * @returns ServiceOptions + * @returns Empty * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; /** - * Verifies a ServiceOptions message. + * Verifies an Empty message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * Creates an Empty message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns ServiceOptions + * @returns Empty */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; /** - * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. - * @param message ServiceOptions + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @param message Empty * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this ServiceOptions to JSON. + * Converts this Empty to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } + } - /** Properties of a MethodOptions. 
*/ - interface IMethodOptions { - - /** MethodOptions deprecated */ - deprecated?: (boolean|null); - - /** MethodOptions idempotencyLevel */ - idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); + /** Namespace api. */ + namespace api { - /** MethodOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** Properties of a Http. */ + interface IHttp { - /** MethodOptions .google.api.http */ - ".google.api.http"?: (google.api.IHttpRule|null); + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); - /** MethodOptions .google.api.methodSignature */ - ".google.api.methodSignature"?: (string[]|null); + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); } - /** Represents a MethodOptions. */ - class MethodOptions implements IMethodOptions { + /** Represents a Http. */ + class Http implements IHttp { /** - * Constructs a new MethodOptions. + * Constructs a new Http. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IMethodOptions); - - /** MethodOptions deprecated. */ - public deprecated: boolean; + constructor(properties?: google.api.IHttp); - /** MethodOptions idempotencyLevel. */ - public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); + /** Http rules. */ + public rules: google.api.IHttpRule[]; - /** MethodOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** Http fullyDecodeReservedExpansion. */ + public fullyDecodeReservedExpansion: boolean; /** - * Creates a new MethodOptions instance using the specified properties. + * Creates a new Http instance using the specified properties. 
* @param [properties] Properties to set - * @returns MethodOptions instance + * @returns Http instance */ - public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + public static create(properties?: google.api.IHttp): google.api.Http; /** - * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. - * @param message MethodOptions message or plain object to encode + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. - * @param message MethodOptions message or plain object to encode + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a MethodOptions message from the specified reader or buffer. + * Decodes a Http message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns MethodOptions + * @returns Http * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; /** - * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * Decodes a Http message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns MethodOptions + * @returns Http * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; /** - * Verifies a MethodOptions message. + * Verifies a Http message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. + * Creates a Http message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns MethodOptions + * @returns Http */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + public static fromObject(object: { [k: string]: any }): google.api.Http; /** - * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. - * @param message MethodOptions + * Creates a plain object from a Http message. 
Also converts values to other types if specified. + * @param message Http * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this MethodOptions to JSON. + * Converts this Http to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace MethodOptions { + /** Properties of a HttpRule. */ + interface IHttpRule { - /** IdempotencyLevel enum. */ - enum IdempotencyLevel { - IDEMPOTENCY_UNKNOWN = 0, - NO_SIDE_EFFECTS = 1, - IDEMPOTENT = 2 - } - } + /** HttpRule selector */ + selector?: (string|null); - /** Properties of an UninterpretedOption. */ - interface IUninterpretedOption { + /** HttpRule get */ + get?: (string|null); - /** UninterpretedOption name */ - name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + /** HttpRule put */ + put?: (string|null); - /** UninterpretedOption identifierValue */ - identifierValue?: (string|null); + /** HttpRule post */ + post?: (string|null); - /** UninterpretedOption positiveIntValue */ - positiveIntValue?: (number|Long|string|null); + /** HttpRule delete */ + "delete"?: (string|null); - /** UninterpretedOption negativeIntValue */ - negativeIntValue?: (number|Long|string|null); + /** HttpRule patch */ + patch?: (string|null); - /** UninterpretedOption doubleValue */ - doubleValue?: (number|null); + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); - /** UninterpretedOption stringValue */ - stringValue?: (Uint8Array|string|null); + /** HttpRule body */ + body?: (string|null); - /** UninterpretedOption aggregateValue */ - aggregateValue?: (string|null); + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); } 
- /** Represents an UninterpretedOption. */ - class UninterpretedOption implements IUninterpretedOption { + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { /** - * Constructs a new UninterpretedOption. + * Constructs a new HttpRule. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IUninterpretedOption); + constructor(properties?: google.api.IHttpRule); - /** UninterpretedOption name. */ - public name: google.protobuf.UninterpretedOption.INamePart[]; + /** HttpRule selector. */ + public selector: string; - /** UninterpretedOption identifierValue. */ - public identifierValue: string; + /** HttpRule get. */ + public get?: (string|null); - /** UninterpretedOption positiveIntValue. */ - public positiveIntValue: (number|Long|string); + /** HttpRule put. */ + public put?: (string|null); - /** UninterpretedOption negativeIntValue. */ - public negativeIntValue: (number|Long|string); + /** HttpRule post. */ + public post?: (string|null); - /** UninterpretedOption doubleValue. */ - public doubleValue: number; + /** HttpRule delete. */ + public delete?: (string|null); - /** UninterpretedOption stringValue. */ - public stringValue: (Uint8Array|string); + /** HttpRule patch. */ + public patch?: (string|null); - /** UninterpretedOption aggregateValue. */ - public aggregateValue: string; + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); /** - * Creates a new UninterpretedOption instance using the specified properties. + * Creates a new HttpRule instance using the specified properties. 
* @param [properties] Properties to set - * @returns UninterpretedOption instance + * @returns HttpRule instance */ - public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; /** - * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @param message UninterpretedOption message or plain object to encode + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @param message UninterpretedOption message or plain object to encode + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an UninterpretedOption message from the specified reader or buffer. + * Decodes a HttpRule message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns UninterpretedOption + * @returns HttpRule * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; /** - * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * Decodes a HttpRule message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns UninterpretedOption + * @returns HttpRule * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; /** - * Verifies an UninterpretedOption message. + * Verifies a HttpRule message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns UninterpretedOption + * @returns HttpRule */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; /** - * Creates a plain object from an UninterpretedOption message. 
Also converts values to other types if specified. - * @param message UninterpretedOption + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this UninterpretedOption to JSON. + * Converts this HttpRule to JSON. * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } - - namespace UninterpretedOption { - - /** Properties of a NamePart. */ - interface INamePart { - - /** NamePart namePart */ - namePart: string; - - /** NamePart isExtension */ - isExtension: boolean; - } - - /** Represents a NamePart. */ - class NamePart implements INamePart { - - /** - * Constructs a new NamePart. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.UninterpretedOption.INamePart); - - /** NamePart namePart. */ - public namePart: string; - - /** NamePart isExtension. */ - public isExtension: boolean; - - /** - * Creates a new NamePart instance using the specified properties. - * @param [properties] Properties to set - * @returns NamePart instance - */ - public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; - - /** - * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. 
- * @param message NamePart message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @param message NamePart message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a NamePart message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; - - /** - * Decodes a NamePart message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; - - /** - * Verifies a NamePart message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a NamePart message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns NamePart - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; - - /** - * Creates a plain object from a NamePart message. Also converts values to other types if specified. - * @param message NamePart - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this NamePart to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } + */ + public toJSON(): { [k: string]: any }; } - /** Properties of a SourceCodeInfo. */ - interface ISourceCodeInfo { + /** Properties of a CustomHttpPattern. */ + interface ICustomHttpPattern { - /** SourceCodeInfo location */ - location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); } - /** Represents a SourceCodeInfo. */ - class SourceCodeInfo implements ISourceCodeInfo { + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { /** - * Constructs a new SourceCodeInfo. + * Constructs a new CustomHttpPattern. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.ISourceCodeInfo); + constructor(properties?: google.api.ICustomHttpPattern); - /** SourceCodeInfo location. */ - public location: google.protobuf.SourceCodeInfo.ILocation[]; + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; /** - * Creates a new SourceCodeInfo instance using the specified properties. + * Creates a new CustomHttpPattern instance using the specified properties. 
* @param [properties] Properties to set - * @returns SourceCodeInfo instance + * @returns CustomHttpPattern instance */ - public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; /** - * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. - * @param message SourceCodeInfo message or plain object to encode + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. - * @param message SourceCodeInfo message or plain object to encode + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a SourceCodeInfo message from the specified reader or buffer. + * Decodes a CustomHttpPattern message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns SourceCodeInfo + * @returns CustomHttpPattern * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; /** - * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns SourceCodeInfo + * @returns CustomHttpPattern * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; /** - * Verifies a SourceCodeInfo message. + * Verifies a CustomHttpPattern message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns SourceCodeInfo + * @returns CustomHttpPattern */ - public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; /** - * Creates a plain object from a SourceCodeInfo message. 
Also converts values to other types if specified. - * @param message SourceCodeInfo + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this SourceCodeInfo to JSON. + * Converts this CustomHttpPattern to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace SourceCodeInfo { - - /** Properties of a Location. */ - interface ILocation { - - /** Location path */ - path?: (number[]|null); - - /** Location span */ - span?: (number[]|null); - - /** Location leadingComments */ - leadingComments?: (string|null); - - /** Location trailingComments */ - trailingComments?: (string|null); - - /** Location leadingDetachedComments */ - leadingDetachedComments?: (string[]|null); - } - - /** Represents a Location. */ - class Location implements ILocation { - - /** - * Constructs a new Location. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); - - /** Location path. */ - public path: number[]; - - /** Location span. */ - public span: number[]; + /** FieldBehavior enum. */ + enum FieldBehavior { + FIELD_BEHAVIOR_UNSPECIFIED = 0, + OPTIONAL = 1, + REQUIRED = 2, + OUTPUT_ONLY = 3, + INPUT_ONLY = 4, + IMMUTABLE = 5, + UNORDERED_LIST = 6 + } - /** Location leadingComments. */ - public leadingComments: string; + /** Properties of a ResourceDescriptor. */ + interface IResourceDescriptor { - /** Location trailingComments. */ - public trailingComments: string; + /** ResourceDescriptor type */ + type?: (string|null); - /** Location leadingDetachedComments. 
*/ - public leadingDetachedComments: string[]; + /** ResourceDescriptor pattern */ + pattern?: (string[]|null); - /** - * Creates a new Location instance using the specified properties. - * @param [properties] Properties to set - * @returns Location instance - */ - public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + /** ResourceDescriptor nameField */ + nameField?: (string|null); - /** - * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. - * @param message Location message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + /** ResourceDescriptor history */ + history?: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History|null); - /** - * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. - * @param message Location message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + /** ResourceDescriptor plural */ + plural?: (string|null); - /** - * Decodes a Location message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + /** ResourceDescriptor singular */ + singular?: (string|null); - /** - * Decodes a Location message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + /** ResourceDescriptor style */ + style?: (google.api.ResourceDescriptor.Style[]|null); + } - /** - * Verifies a Location message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); + /** Represents a ResourceDescriptor. */ + class ResourceDescriptor implements IResourceDescriptor { - /** - * Creates a Location message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Location - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + /** + * Constructs a new ResourceDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceDescriptor); - /** - * Creates a plain object from a Location message. Also converts values to other types if specified. 
- * @param message Location - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + /** ResourceDescriptor type. */ + public type: string; - /** - * Converts this Location to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - } - } + /** ResourceDescriptor pattern. */ + public pattern: string[]; - /** Properties of a GeneratedCodeInfo. */ - interface IGeneratedCodeInfo { + /** ResourceDescriptor nameField. */ + public nameField: string; - /** GeneratedCodeInfo annotation */ - annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); - } + /** ResourceDescriptor history. */ + public history: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History); - /** Represents a GeneratedCodeInfo. */ - class GeneratedCodeInfo implements IGeneratedCodeInfo { + /** ResourceDescriptor plural. */ + public plural: string; - /** - * Constructs a new GeneratedCodeInfo. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IGeneratedCodeInfo); + /** ResourceDescriptor singular. */ + public singular: string; - /** GeneratedCodeInfo annotation. */ - public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + /** ResourceDescriptor style. */ + public style: google.api.ResourceDescriptor.Style[]; /** - * Creates a new GeneratedCodeInfo instance using the specified properties. + * Creates a new ResourceDescriptor instance using the specified properties. 
* @param [properties] Properties to set - * @returns GeneratedCodeInfo instance + * @returns ResourceDescriptor instance */ - public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; /** - * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @param message GeneratedCodeInfo message or plain object to encode + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @param message GeneratedCodeInfo message or plain object to encode + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * Decodes a ResourceDescriptor message from the specified reader or buffer. 
* @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns GeneratedCodeInfo + * @returns ResourceDescriptor * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns GeneratedCodeInfo + * @returns ResourceDescriptor * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; /** - * Verifies a GeneratedCodeInfo message. + * Verifies a ResourceDescriptor message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns GeneratedCodeInfo + * @returns ResourceDescriptor */ - public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; /** - * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. - * @param message GeneratedCodeInfo + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @param message ResourceDescriptor * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this GeneratedCodeInfo to JSON. + * Converts this ResourceDescriptor to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } - namespace GeneratedCodeInfo { - - /** Properties of an Annotation. */ - interface IAnnotation { - - /** Annotation path */ - path?: (number[]|null); - - /** Annotation sourceFile */ - sourceFile?: (string|null); - - /** Annotation begin */ - begin?: (number|null); + namespace ResourceDescriptor { - /** Annotation end */ - end?: (number|null); + /** History enum. */ + enum History { + HISTORY_UNSPECIFIED = 0, + ORIGINALLY_SINGLE_PATTERN = 1, + FUTURE_MULTI_PATTERN = 2 } - /** Represents an Annotation. */ - class Annotation implements IAnnotation { - - /** - * Constructs a new Annotation. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); - - /** Annotation path. */ - public path: number[]; - - /** Annotation sourceFile. */ - public sourceFile: string; - - /** Annotation begin. */ - public begin: number; - - /** Annotation end. 
*/ - public end: number; - - /** - * Creates a new Annotation instance using the specified properties. - * @param [properties] Properties to set - * @returns Annotation instance - */ - public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. - * @param message Annotation message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. - * @param message Annotation message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Annotation message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Decodes an Annotation message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Verifies an Annotation message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Annotation message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Annotation - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Creates a plain object from an Annotation message. Also converts values to other types if specified. - * @param message Annotation - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Annotation to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; + /** Style enum. */ + enum Style { + STYLE_UNSPECIFIED = 0, + DECLARATIVE_FRIENDLY = 1 } } - /** Properties of a Timestamp. */ - interface ITimestamp { + /** Properties of a ResourceReference. */ + interface IResourceReference { - /** Timestamp seconds */ - seconds?: (number|Long|string|null); + /** ResourceReference type */ + type?: (string|null); - /** Timestamp nanos */ - nanos?: (number|null); + /** ResourceReference childType */ + childType?: (string|null); } - /** Represents a Timestamp. */ - class Timestamp implements ITimestamp { + /** Represents a ResourceReference. 
*/ + class ResourceReference implements IResourceReference { /** - * Constructs a new Timestamp. + * Constructs a new ResourceReference. * @param [properties] Properties to set */ - constructor(properties?: google.protobuf.ITimestamp); + constructor(properties?: google.api.IResourceReference); - /** Timestamp seconds. */ - public seconds: (number|Long|string); + /** ResourceReference type. */ + public type: string; - /** Timestamp nanos. */ - public nanos: number; + /** ResourceReference childType. */ + public childType: string; /** - * Creates a new Timestamp instance using the specified properties. + * Creates a new ResourceReference instance using the specified properties. * @param [properties] Properties to set - * @returns Timestamp instance + * @returns ResourceReference instance */ - public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; + public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @param message Timestamp message or plain object to encode + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @param message Timestamp message or plain object to encode + * Encodes the specified ResourceReference message, length delimited. 
Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes a Timestamp message from the specified reader or buffer. + * Decodes a ResourceReference message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns Timestamp + * @returns ResourceReference * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns Timestamp + * @returns ResourceReference * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; /** - * Verifies a Timestamp message. + * Verifies a ResourceReference message. 
* @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. * @param object Plain object - * @returns Timestamp + * @returns ResourceReference */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; + public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. - * @param message Timestamp + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @param message ResourceReference * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this Timestamp to JSON. + * Converts this ResourceReference to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; } + } - /** Properties of an Empty. */ - interface IEmpty { + /** Namespace rpc. */ + namespace rpc { + + /** Properties of a Status. */ + interface IStatus { + + /** Status code */ + code?: (number|null); + + /** Status message */ + message?: (string|null); + + /** Status details */ + details?: (google.protobuf.IAny[]|null); } - /** Represents an Empty. */ - class Empty implements IEmpty { + /** Represents a Status. */ + class Status implements IStatus { /** - * Constructs a new Empty. + * Constructs a new Status. 
* @param [properties] Properties to set */ - constructor(properties?: google.protobuf.IEmpty); + constructor(properties?: google.rpc.IStatus); + + /** Status code. */ + public code: number; + + /** Status message. */ + public message: string; + + /** Status details. */ + public details: google.protobuf.IAny[]; /** - * Creates a new Empty instance using the specified properties. + * Creates a new Status instance using the specified properties. * @param [properties] Properties to set - * @returns Empty instance + * @returns Status instance */ - public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; + public static create(properties?: google.rpc.IStatus): google.rpc.Status; /** - * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. - * @param message Empty message or plain object to encode + * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. + * @param message Status message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + public static encode(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. - * @param message Empty message or plain object to encode + * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. 
+ * @param message Status message or plain object to encode * @param [writer] Writer to encode to * @returns Writer */ - public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + public static encodeDelimited(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; /** - * Decodes an Empty message from the specified reader or buffer. + * Decodes a Status message from the specified reader or buffer. * @param reader Reader or buffer to decode from * @param [length] Message length if known beforehand - * @returns Empty + * @returns Status * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.rpc.Status; /** - * Decodes an Empty message from the specified reader or buffer, length delimited. + * Decodes a Status message from the specified reader or buffer, length delimited. * @param reader Reader or buffer to decode from - * @returns Empty + * @returns Status * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.rpc.Status; /** - * Verifies an Empty message. + * Verifies a Status message. * @param message Plain object to verify * @returns `null` if valid, otherwise the reason why it is not */ public static verify(message: { [k: string]: any }): (string|null); /** - * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * Creates a Status message from a plain object. Also converts values to their respective internal types. 
* @param object Plain object - * @returns Empty + * @returns Status */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; + public static fromObject(object: { [k: string]: any }): google.rpc.Status; /** - * Creates a plain object from an Empty message. Also converts values to other types if specified. - * @param message Empty + * Creates a plain object from a Status message. Also converts values to other types if specified. + * @param message Status * @param [options] Conversion options * @returns Plain object */ - public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; + public static toObject(message: google.rpc.Status, options?: $protobuf.IConversionOptions): { [k: string]: any }; /** - * Converts this Empty to JSON. + * Converts this Status to JSON. * @returns JSON object */ public toJSON(): { [k: string]: any }; diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index a1cde29224c..5e9a6239f21 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1145,6 +1145,404 @@ return AvroRows; })(); + v1.ProtoSchema = (function() { + + /** + * Properties of a ProtoSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IProtoSchema + * @property {google.protobuf.IDescriptorProto|null} [protoDescriptor] ProtoSchema protoDescriptor + */ + + /** + * Constructs a new ProtoSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ProtoSchema. + * @implements IProtoSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set + */ + function ProtoSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ProtoSchema protoDescriptor. 
+ * @member {google.protobuf.IDescriptorProto|null|undefined} protoDescriptor + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @instance + */ + ProtoSchema.prototype.protoDescriptor = null; + + /** + * Creates a new ProtoSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema instance + */ + ProtoSchema.create = function create(properties) { + return new ProtoSchema(properties); + }; + + /** + * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.protoDescriptor != null && Object.hasOwnProperty.call(message, "protoDescriptor")) + $root.google.protobuf.DescriptorProto.encode(message.protoDescriptor, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ProtoSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ProtoSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) { + var error = $root.google.protobuf.DescriptorProto.verify(message.protoDescriptor); + if (error) + return "protoDescriptor." + error; + } + return null; + }; + + /** + * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + */ + ProtoSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); + if (object.protoDescriptor != null) { + if (typeof object.protoDescriptor !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ProtoSchema.protoDescriptor: object expected"); + message.protoDescriptor = $root.google.protobuf.DescriptorProto.fromObject(object.protoDescriptor); + } + return message; + }; + + /** + * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ProtoSchema} message ProtoSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ProtoSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.protoDescriptor = null; + if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) + object.protoDescriptor = $root.google.protobuf.DescriptorProto.toObject(message.protoDescriptor, options); + return object; + }; + + /** + * Converts this ProtoSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @instance + * @returns {Object.} JSON object + */ + ProtoSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ProtoSchema; + })(); + + v1.ProtoRows = (function() { + + /** + * Properties of a ProtoRows. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IProtoRows + * @property {Array.|null} [serializedRows] ProtoRows serializedRows + */ + + /** + * Constructs a new ProtoRows. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ProtoRows. + * @implements IProtoRows + * @constructor + * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set + */ + function ProtoRows(properties) { + this.serializedRows = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ProtoRows serializedRows. + * @member {Array.} serializedRows + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @instance + */ + ProtoRows.prototype.serializedRows = $util.emptyArray; + + /** + * Creates a new ProtoRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows instance + */ + ProtoRows.create = function create(properties) { + return new ProtoRows(properties); + }; + + /** + * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRows != null && message.serializedRows.length) + for (var i = 0; i < message.serializedRows.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRows[i]); + return writer; + }; + + /** + * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ProtoRows message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.serializedRows && message.serializedRows.length)) + message.serializedRows = []; + message.serializedRows.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ProtoRows message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ProtoRows message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ProtoRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRows != null && message.hasOwnProperty("serializedRows")) { + if (!Array.isArray(message.serializedRows)) + return "serializedRows: array expected"; + for (var i = 0; i < message.serializedRows.length; ++i) + if (!(message.serializedRows[i] && typeof message.serializedRows[i].length === "number" || $util.isString(message.serializedRows[i]))) + return "serializedRows: buffer[] expected"; + } + return null; + }; + + /** + * Creates a ProtoRows message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + */ + ProtoRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); + if (object.serializedRows) { + if (!Array.isArray(object.serializedRows)) + throw TypeError(".google.cloud.bigquery.storage.v1.ProtoRows.serializedRows: array expected"); + message.serializedRows = []; + for (var i = 0; i < object.serializedRows.length; ++i) + if (typeof object.serializedRows[i] === "string") + $util.base64.decode(object.serializedRows[i], message.serializedRows[i] = $util.newBuffer($util.base64.length(object.serializedRows[i])), 0); + else if (object.serializedRows[i].length) + message.serializedRows[i] = object.serializedRows[i]; + } + return message; + }; + + /** + * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.ProtoRows} message ProtoRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ProtoRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.serializedRows = []; + if (message.serializedRows && message.serializedRows.length) { + object.serializedRows = []; + for (var j = 0; j < message.serializedRows.length; ++j) + object.serializedRows[j] = options.bytes === String ? $util.base64.encode(message.serializedRows[j], 0, message.serializedRows[j].length) : options.bytes === Array ? 
Array.prototype.slice.call(message.serializedRows[j]) : message.serializedRows[j]; + } + return object; + }; + + /** + * Converts this ProtoRows to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @instance + * @returns {Object.} JSON object + */ + ProtoRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ProtoRows; + })(); + v1.BigQueryRead = (function() { /** @@ -1279,13 +1677,246 @@ return BigQueryRead; })(); - v1.CreateReadSessionRequest = (function() { + v1.BigQueryWrite = (function() { /** - * Properties of a CreateReadSessionRequest. + * Constructs a new BigQueryWrite service. * @memberof google.cloud.bigquery.storage.v1 - * @interface ICreateReadSessionRequest - * @property {string|null} [parent] CreateReadSessionRequest parent + * @classdesc Represents a BigQueryWrite + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryWrite(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryWrite.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryWrite; + + /** + * Creates new BigQueryWrite service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryWrite} RPC service. Useful where requests and/or responses are streamed. 
+ */ + BigQueryWrite.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#createWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef CreateWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream + */ + + /** + * Calls CreateWriteStream. + * @function createWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.createWriteStream = function createWriteStream(request, callback) { + return this.rpcCall(createWriteStream, $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); + }, "name", { value: "CreateWriteStream" }); + + /** + * Calls CreateWriteStream. + * @function createWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#appendRows}. 
+ * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef AppendRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} [response] AppendRowsResponse + */ + + /** + * Calls AppendRows. + * @function appendRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback} callback Node-style callback called with the error, if any, and AppendRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.appendRows = function appendRows(request, callback) { + return this.rpcCall(appendRows, $root.google.cloud.bigquery.storage.v1.AppendRowsRequest, $root.google.cloud.bigquery.storage.v1.AppendRowsResponse, request, callback); + }, "name", { value: "AppendRows" }); + + /** + * Calls AppendRows. + * @function appendRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#getWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef GetWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream + */ + + /** + * Calls GetWriteStream. 
+ * @function getWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.getWriteStream = function getWriteStream(request, callback) { + return this.rpcCall(getWriteStream, $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); + }, "name", { value: "GetWriteStream" }); + + /** + * Calls GetWriteStream. + * @function getWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#finalizeWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef FinalizeWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} [response] FinalizeWriteStreamResponse + */ + + /** + * Calls FinalizeWriteStream. 
+ * @function finalizeWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback} callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.finalizeWriteStream = function finalizeWriteStream(request, callback) { + return this.rpcCall(finalizeWriteStream, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, request, callback); + }, "name", { value: "FinalizeWriteStream" }); + + /** + * Calls FinalizeWriteStream. + * @function finalizeWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#batchCommitWriteStreams}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef BatchCommitWriteStreamsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} [response] BatchCommitWriteStreamsResponse + */ + + /** + * Calls BatchCommitWriteStreams. 
+ * @function batchCommitWriteStreams + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback} callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.batchCommitWriteStreams = function batchCommitWriteStreams(request, callback) { + return this.rpcCall(batchCommitWriteStreams, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, request, callback); + }, "name", { value: "BatchCommitWriteStreams" }); + + /** + * Calls BatchCommitWriteStreams. + * @function batchCommitWriteStreams + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#flushRows}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef FlushRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} [response] FlushRowsResponse + */ + + /** + * Calls FlushRows. 
+ * @function flushRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback} callback Node-style callback called with the error, if any, and FlushRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.flushRows = function flushRows(request, callback) { + return this.rpcCall(flushRows, $root.google.cloud.bigquery.storage.v1.FlushRowsRequest, $root.google.cloud.bigquery.storage.v1.FlushRowsResponse, request, callback); + }, "name", { value: "FlushRows" }); + + /** + * Calls FlushRows. + * @function flushRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryWrite; + })(); + + v1.CreateReadSessionRequest = (function() { + + /** + * Properties of a CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ICreateReadSessionRequest + * @property {string|null} [parent] CreateReadSessionRequest parent * @property {google.cloud.bigquery.storage.v1.IReadSession|null} [readSession] CreateReadSessionRequest readSession * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount */ @@ -3173,50 +3804,25 @@ return SplitReadStreamResponse; })(); - /** - * DataFormat enum. 
- * @name google.cloud.bigquery.storage.v1.DataFormat - * @enum {number} - * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value - * @property {number} AVRO=1 AVRO value - * @property {number} ARROW=2 ARROW value - */ - v1.DataFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; - values[valuesById[1] = "AVRO"] = 1; - values[valuesById[2] = "ARROW"] = 2; - return values; - })(); - - v1.ReadSession = (function() { + v1.CreateWriteStreamRequest = (function() { /** - * Properties of a ReadSession. + * Properties of a CreateWriteStreamRequest. * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadSession - * @property {string|null} [name] ReadSession name - * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime - * @property {google.cloud.bigquery.storage.v1.DataFormat|null} [dataFormat] ReadSession dataFormat - * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadSession avroSchema - * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema - * @property {string|null} [table] ReadSession table - * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers - * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions - * @property {Array.|null} [streams] ReadSession streams - * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned + * @interface ICreateWriteStreamRequest + * @property {string|null} [parent] CreateWriteStreamRequest parent + * @property {google.cloud.bigquery.storage.v1.IWriteStream|null} [writeStream] CreateWriteStreamRequest writeStream */ /** - * Constructs a new ReadSession. + * Constructs a new CreateWriteStreamRequest. 
* @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadSession. - * @implements IReadSession + * @classdesc Represents a CreateWriteStreamRequest. + * @implements ICreateWriteStreamRequest * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set */ - function ReadSession(properties) { - this.streams = []; + function CreateWriteStreamRequest(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -3224,209 +3830,345 @@ } /** - * ReadSession name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * CreateWriteStreamRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest * @instance */ - ReadSession.prototype.name = ""; + CreateWriteStreamRequest.prototype.parent = ""; /** - * ReadSession expireTime. - * @member {google.protobuf.ITimestamp|null|undefined} expireTime - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * CreateWriteStreamRequest writeStream. + * @member {google.cloud.bigquery.storage.v1.IWriteStream|null|undefined} writeStream + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest * @instance */ - ReadSession.prototype.expireTime = null; + CreateWriteStreamRequest.prototype.writeStream = null; /** - * ReadSession dataFormat. - * @member {google.cloud.bigquery.storage.v1.DataFormat} dataFormat - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Creates a new CreateWriteStreamRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest instance */ - ReadSession.prototype.dataFormat = 0; + CreateWriteStreamRequest.create = function create(properties) { + return new CreateWriteStreamRequest(properties); + }; /** - * ReadSession avroSchema. - * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - ReadSession.prototype.avroSchema = null; + CreateWriteStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + $root.google.cloud.bigquery.storage.v1.WriteStream.encode(message.writeStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; /** - * ReadSession arrowSchema. 
- * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - ReadSession.prototype.arrowSchema = null; + CreateWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; /** - * ReadSession table. - * @member {string} table - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.prototype.table = ""; + CreateWriteStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.parent = reader.string(); + break; + case 2: + message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; /** - * ReadSession tableModifiers. - * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.prototype.tableModifiers = null; + CreateWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; /** - * ReadSession readOptions. - * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null|undefined} readOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance + * Verifies a CreateWriteStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ReadSession.prototype.readOptions = null; - - /** - * ReadSession streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1.ReadSession + CreateWriteStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) { + var error = $root.google.cloud.bigquery.storage.v1.WriteStream.verify(message.writeStream); + if (error) + return "writeStream." + error; + } + return null; + }; + + /** + * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + */ + CreateWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.writeStream != null) { + if (typeof object.writeStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.writeStream: object expected"); + message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.fromObject(object.writeStream); + } + return message; + }; + + /** + * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} message CreateWriteStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateWriteStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.writeStream = null; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.toObject(message.writeStream, options); + return object; + }; + + /** + * Converts this CreateWriteStreamRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest * @instance + * @returns {Object.} JSON object */ - ReadSession.prototype.streams = $util.emptyArray; + CreateWriteStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return CreateWriteStreamRequest; + })(); + + v1.AppendRowsRequest = (function() { /** - * ReadSession estimatedTotalBytesScanned. - * @member {number|Long} estimatedTotalBytesScanned - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * Properties of an AppendRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAppendRowsRequest + * @property {string|null} [writeStream] AppendRowsRequest writeStream + * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset + * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows + * @property {string|null} [traceId] AppendRowsRequest traceId + */ + + /** + * Constructs a new AppendRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AppendRowsRequest. + * @implements IAppendRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set + */ + function AppendRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AppendRowsRequest writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @instance */ - ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + AppendRowsRequest.prototype.writeStream = ""; + + /** + * AppendRowsRequest offset. 
+ * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.offset = null; + + /** + * AppendRowsRequest protoRows. + * @member {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null|undefined} protoRows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.protoRows = null; + + /** + * AppendRowsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.traceId = ""; // OneOf field names bound to virtual getters and setters var $oneOfFields; /** - * ReadSession schema. - * @member {"avroSchema"|"arrowSchema"|undefined} schema - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * AppendRowsRequest rows. + * @member {"protoRows"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @instance */ - Object.defineProperty(ReadSession.prototype, "schema", { - get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + Object.defineProperty(AppendRowsRequest.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["protoRows"]), set: $util.oneOfSetter($oneOfFields) }); /** - * Creates a new ReadSession instance using the specified properties. + * Creates a new AppendRowsRequest instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession instance + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest instance */ - ReadSession.create = function create(properties) { - return new ReadSession(properties); + AppendRowsRequest.create = function create(properties) { + return new AppendRowsRequest(properties); }; /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadSession.encode = function encode(message, writer) { + AppendRowsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) - $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); - if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) - $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) - $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.table != null && Object.hasOwnProperty.call(message, "table")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 
=*/58).fork()).ldelim(); - if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) - $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); - if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) - writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.protoRows != null && Object.hasOwnProperty.call(message, "protoRows")) + $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId); return writer; }; /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadSession.encodeDelimited = function encodeDelimited(message, writer) { + AppendRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ReadSession message from the specified reader or buffer. + * Decodes an AppendRowsRequest message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.decode = function decode(reader, length) { + AppendRowsRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.name = reader.string(); + message.writeStream = reader.string(); break; case 2: - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 3: - message.dataFormat = reader.int32(); + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); break; case 4: - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); - break; - case 5: - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); break; case 6: - message.table = reader.string(); - break; - case 7: - message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); - break; - case 8: - message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); - break; - case 10: - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); - break; - case 12: - message.estimatedTotalBytesScanned = reader.int64(); + message.traceId = reader.string(); break; default: reader.skipType(tag & 7); @@ -3437,265 +4179,146 @@ }; /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.decodeDelimited = function decodeDelimited(reader) { + AppendRowsRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ReadSession message. + * Verifies an AppendRowsRequest message. * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ReadSession.verify = function verify(message) { + AppendRowsRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; var properties = {}; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.expireTime); + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); if (error) - return "expireTime." 
+ error; - } - if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) - switch (message.dataFormat) { - default: - return "dataFormat: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); - if (error) - return "avroSchema." + error; - } + return "offset." + error; } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - if (properties.schema === 1) - return "schema: multiple values"; - properties.schema = 1; + if (message.protoRows != null && message.hasOwnProperty("protoRows")) { + properties.rows = 1; { - var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); - if (error) - return "arrowSchema." + error; - } - } - if (message.table != null && message.hasOwnProperty("table")) - if (!$util.isString(message.table)) - return "table: string expected"; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify(message.tableModifiers); - if (error) - return "tableModifiers." + error; - } - if (message.readOptions != null && message.hasOwnProperty("readOptions")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify(message.readOptions); - if (error) - return "readOptions." + error; - } - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.streams[i]); + var error = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify(message.protoRows); if (error) - return "streams." + error; + return "protoRows." 
+ error; } } - if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) - if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) - return "estimatedTotalBytesScanned: integer|Long expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. + * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest */ - ReadSession.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession) + AppendRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); - if (object.name != null) - message.name = String(object.name); - if (object.expireTime != null) { - if (typeof object.expireTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.expireTime: object expected"); - message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); + if (object.writeStream != null) + message.writeStream = 
String(object.writeStream); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.offset: object expected"); + message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); } - switch (object.dataFormat) { - case "DATA_FORMAT_UNSPECIFIED": - case 0: - message.dataFormat = 0; - break; - case "AVRO": - case 1: - message.dataFormat = 1; - break; - case "ARROW": - case 2: - message.dataFormat = 2; - break; + if (object.protoRows != null) { + if (typeof object.protoRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.protoRows: object expected"); + message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.fromObject(object.protoRows); } - if (object.avroSchema != null) { - if (typeof object.avroSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.avroSchema: object expected"); - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest} message AppendRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AppendRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.writeStream = ""; + object.offset = null; + object.traceId = ""; } - if (object.arrowSchema != null) { - if (typeof object.arrowSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.arrowSchema: object expected"); - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); - } - if (object.table != null) - message.table = String(object.table); - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.tableModifiers: object expected"); - message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.fromObject(object.tableModifiers); - } - if (object.readOptions != null) { - if (typeof object.readOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.readOptions: object expected"); - message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.fromObject(object.readOptions); - } - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); - } - } - if 
(object.estimatedTotalBytesScanned != null) - if ($util.Long) - (message.estimatedTotalBytesScanned = $util.Long.fromValue(object.estimatedTotalBytesScanned)).unsigned = false; - else if (typeof object.estimatedTotalBytesScanned === "string") - message.estimatedTotalBytesScanned = parseInt(object.estimatedTotalBytesScanned, 10); - else if (typeof object.estimatedTotalBytesScanned === "number") - message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; - else if (typeof object.estimatedTotalBytesScanned === "object") - message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession} message ReadSession - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadSession.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streams = []; - if (options.defaults) { - object.name = ""; - object.expireTime = null; - object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; - object.table = ""; - object.tableModifiers = null; - object.readOptions = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.estimatedTotalBytesScanned = options.longs === String ? 
"0" : 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) - object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); - if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) - object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); - if (options.oneofs) - object.schema = "avroSchema"; - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); + if (message.protoRows != null && message.hasOwnProperty("protoRows")) { + object.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.toObject(message.protoRows, options); if (options.oneofs) - object.schema = "arrowSchema"; - } - if (message.table != null && message.hasOwnProperty("table")) - object.table = message.table; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.toObject(message.tableModifiers, options); - if (message.readOptions != null && message.hasOwnProperty("readOptions")) - object.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.toObject(message.readOptions, options); - if (message.streams && 
message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); + object.rows = "protoRows"; } - if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) - if (typeof message.estimatedTotalBytesScanned === "number") - object.estimatedTotalBytesScanned = options.longs === String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; - else - object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; /** - * Converts this ReadSession to JSON. + * Converts this AppendRowsRequest to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @instance * @returns {Object.} JSON object */ - ReadSession.prototype.toJSON = function toJSON() { + AppendRowsRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - ReadSession.TableModifiers = (function() { + AppendRowsRequest.ProtoData = (function() { /** - * Properties of a TableModifiers. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @interface ITableModifiers - * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + * Properties of a ProtoData. 
+ * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @interface IProtoData + * @property {google.cloud.bigquery.storage.v1.IProtoSchema|null} [writerSchema] ProtoData writerSchema + * @property {google.cloud.bigquery.storage.v1.IProtoRows|null} [rows] ProtoData rows */ /** - * Constructs a new TableModifiers. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @classdesc Represents a TableModifiers. - * @implements ITableModifiers + * Constructs a new ProtoData. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @classdesc Represents a ProtoData. + * @implements IProtoData * @constructor - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set */ - function TableModifiers(properties) { + function ProtoData(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -3703,75 +4326,88 @@ } /** - * TableModifiers snapshotTime. - * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * ProtoData writerSchema. + * @member {google.cloud.bigquery.storage.v1.IProtoSchema|null|undefined} writerSchema + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @instance */ - TableModifiers.prototype.snapshotTime = null; + ProtoData.prototype.writerSchema = null; /** - * Creates a new TableModifiers instance using the specified properties. + * ProtoData rows. + * @member {google.cloud.bigquery.storage.v1.IProtoRows|null|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @instance + */ + ProtoData.prototype.rows = null; + + /** + * Creates a new ProtoData instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers instance + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData instance */ - TableModifiers.create = function create(properties) { - return new TableModifiers(properties); + ProtoData.create = function create(properties) { + return new ProtoData(properties); }; /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableModifiers.encode = function encode(message, writer) { + ProtoData.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) - $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.writerSchema != null && Object.hasOwnProperty.call(message, "writerSchema")) + $root.google.cloud.bigquery.storage.v1.ProtoSchema.encode(message.writerSchema, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.rows != null && Object.hasOwnProperty.call(message, "rows")) + $root.google.cloud.bigquery.storage.v1.ProtoRows.encode(message.rows, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + ProtoData.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a TableModifiers message from the specified reader or buffer. + * Decodes a ProtoData message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableModifiers.decode = function decode(reader, length) { + ProtoData.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); + break; + case 2: + message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -3782,234 +4418,475 @@ }; /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * Decodes a ProtoData message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableModifiers.decodeDelimited = function decodeDelimited(reader) { + ProtoData.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a TableModifiers message. + * Verifies a ProtoData message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - TableModifiers.verify = function verify(message) { + ProtoData.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.ProtoSchema.verify(message.writerSchema); if (error) - return "snapshotTime." + error; + return "writerSchema." + error; + } + if (message.rows != null && message.hasOwnProperty("rows")) { + var error = $root.google.cloud.bigquery.storage.v1.ProtoRows.verify(message.rows); + if (error) + return "rows." + error; } return null; }; /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData */ - TableModifiers.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) + ProtoData.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData) return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); - if (object.snapshotTime != null) { - if (typeof object.snapshotTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.snapshotTime: object expected"); - message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); + if (object.writerSchema != null) { + if (typeof object.writerSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.writerSchema: object expected"); + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.fromObject(object.writerSchema); + } + if (object.rows != null) { + if (typeof object.rows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.rows: object expected"); + message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.fromObject(object.rows); } return message; }; /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} message TableModifiers + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} message ProtoData * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - TableModifiers.toObject = function toObject(message, options) { + ProtoData.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) - object.snapshotTime = null; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) - object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + if (options.defaults) { + object.writerSchema = null; + object.rows = null; + } + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) + object.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.toObject(message.writerSchema, options); + if (message.rows != null && message.hasOwnProperty("rows")) + object.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.toObject(message.rows, options); return object; }; /** - * Converts this TableModifiers to JSON. + * Converts this ProtoData to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData * @instance * @returns {Object.} JSON object */ - TableModifiers.prototype.toJSON = function toJSON() { + ProtoData.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return TableModifiers; + return ProtoData; })(); - ReadSession.TableReadOptions = (function() { + return AppendRowsRequest; + })(); - /** - * Properties of a TableReadOptions. 
- * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @interface ITableReadOptions - * @property {Array.|null} [selectedFields] TableReadOptions selectedFields - * @property {string|null} [rowRestriction] TableReadOptions rowRestriction - * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions - */ + v1.AppendRowsResponse = (function() { - /** - * Constructs a new TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @classdesc Represents a TableReadOptions. - * @implements ITableReadOptions - * @constructor - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set - */ - function TableReadOptions(properties) { - this.selectedFields = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + /** + * Properties of an AppendRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAppendRowsResponse + * @property {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null} [appendResult] AppendRowsResponse appendResult + * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error + * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema + */ - /** - * TableReadOptions selectedFields. - * @member {Array.} selectedFields - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.selectedFields = $util.emptyArray; + /** + * Constructs a new AppendRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AppendRowsResponse. 
+ * @implements IAppendRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set + */ + function AppendRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * TableReadOptions rowRestriction. - * @member {string} rowRestriction - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.rowRestriction = ""; + /** + * AppendRowsResponse appendResult. + * @member {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null|undefined} appendResult + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.appendResult = null; - /** - * TableReadOptions arrowSerializationOptions. - * @member {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null|undefined} arrowSerializationOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.arrowSerializationOptions = null; + /** + * AppendRowsResponse error. + * @member {google.rpc.IStatus|null|undefined} error + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.error = null; - // OneOf field names bound to virtual getters and setters - var $oneOfFields; + /** + * AppendRowsResponse updatedSchema. + * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} updatedSchema + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.updatedSchema = null; - /** - * TableReadOptions outputFormatSerializationOptions. 
- * @member {"arrowSerializationOptions"|undefined} outputFormatSerializationOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { - get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions"]), - set: $util.oneOfSetter($oneOfFields) - }); + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * AppendRowsResponse response. + * @member {"appendResult"|"error"|undefined} response + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + Object.defineProperty(AppendRowsResponse.prototype, "response", { + get: $util.oneOfGetter($oneOfFields = ["appendResult", "error"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new AppendRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse instance + */ + AppendRowsResponse.create = function create(properties) { + return new AppendRowsResponse(properties); + }; + + /** + * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.appendResult != null && Object.hasOwnProperty.call(message, "appendResult")) + $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.encode(message.appendResult, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.error != null && Object.hasOwnProperty.call(message, "error")) + $root.google.rpc.Status.encode(message.error, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.updatedSchema != null && Object.hasOwnProperty.call(message, "updatedSchema")) + $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.updatedSchema, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); + break; + case 2: + message.error = $root.google.rpc.Status.decode(reader, reader.uint32()); + break; + case 3: + message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AppendRowsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AppendRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.appendResult != null && message.hasOwnProperty("appendResult")) { + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify(message.appendResult); + if (error) + return "appendResult." + error; + } + } + if (message.error != null && message.hasOwnProperty("error")) { + if (properties.response === 1) + return "response: multiple values"; + properties.response = 1; + { + var error = $root.google.rpc.Status.verify(message.error); + if (error) + return "error." + error; + } + } + if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.updatedSchema); + if (error) + return "updatedSchema." + error; + } + return null; + }; + + /** + * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + */ + AppendRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); + if (object.appendResult != null) { + if (typeof object.appendResult !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.appendResult: object expected"); + message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.fromObject(object.appendResult); + } + if (object.error != null) { + if (typeof object.error !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.error: object expected"); + message.error = $root.google.rpc.Status.fromObject(object.error); + } + if (object.updatedSchema != null) { + if (typeof object.updatedSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.updatedSchema: object expected"); + message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.updatedSchema); + } + return message; + }; + + /** + * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} message AppendRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AppendRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.updatedSchema = null; + if (message.appendResult != null && message.hasOwnProperty("appendResult")) { + object.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.toObject(message.appendResult, options); + if (options.oneofs) + object.response = "appendResult"; + } + if (message.error != null && message.hasOwnProperty("error")) { + object.error = $root.google.rpc.Status.toObject(message.error, options); + if (options.oneofs) + object.response = "error"; + } + if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) + object.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.updatedSchema, options); + return object; + }; + + /** + * Converts this AppendRowsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + * @returns {Object.} JSON object + */ + AppendRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + AppendRowsResponse.AppendResult = (function() { /** - * Creates a new TableReadOptions instance using the specified properties. + * Properties of an AppendResult. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @interface IAppendResult + * @property {google.protobuf.IInt64Value|null} [offset] AppendResult offset + */ + + /** + * Constructs a new AppendResult. 
+ * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @classdesc Represents an AppendResult. + * @implements IAppendResult + * @constructor + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set + */ + function AppendResult(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AppendResult offset. + * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @instance + */ + AppendResult.prototype.offset = null; + + /** + * Creates a new AppendResult instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions instance + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult instance */ - TableReadOptions.create = function create(properties) { - return new TableReadOptions(properties); + AppendResult.create = function create(properties) { + return new AppendResult(properties); }; /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReadOptions.encode = function encode(message, writer) { + AppendResult.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.selectedFields != null && message.selectedFields.length) - for (var i = 0; i < message.selectedFields.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); - if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) - $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + AppendResult.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a TableReadOptions message from the specified reader or buffer. + * Decodes an AppendResult message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decode = function decode(reader, length) { + AppendResult.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); - break; - case 2: - message.rowRestriction = reader.string(); - break; - case 3: - message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -4020,150 +4897,115 @@ }; /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * Decodes an AppendResult message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + AppendResult.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a TableReadOptions message. + * Verifies an AppendResult message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - TableReadOptions.verify = function verify(message) { + AppendResult.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - var properties = {}; - if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { - if (!Array.isArray(message.selectedFields)) - return "selectedFields: array expected"; - for (var i = 0; i < message.selectedFields.length; ++i) - if (!$util.isString(message.selectedFields[i])) - return "selectedFields: string[] expected"; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - if (!$util.isString(message.rowRestriction)) - return "rowRestriction: string expected"; - if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { - properties.outputFormatSerializationOptions = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify(message.arrowSerializationOptions); - if (error) - return "arrowSerializationOptions." + error; - } + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); + if (error) + return "offset." + error; } return null; }; /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult */ - TableReadOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) + AppendResult.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult) return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); - if (object.selectedFields) { - if (!Array.isArray(object.selectedFields)) - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.selectedFields: array expected"); - message.selectedFields = []; - for (var i = 0; i < object.selectedFields.length; ++i) - message.selectedFields[i] = String(object.selectedFields[i]); - } - if (object.rowRestriction != null) - message.rowRestriction = String(object.rowRestriction); - if (object.arrowSerializationOptions != null) { - if (typeof object.arrowSerializationOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); - message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.offset: object expected"); + message.offset = 
$root.google.protobuf.Int64Value.fromObject(object.offset); } return message; }; /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * Creates a plain object from an AppendResult message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} message TableReadOptions + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} message AppendResult * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - TableReadOptions.toObject = function toObject(message, options) { + AppendResult.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.selectedFields = []; if (options.defaults) - object.rowRestriction = ""; - if (message.selectedFields && message.selectedFields.length) { - object.selectedFields = []; - for (var j = 0; j < message.selectedFields.length; ++j) - object.selectedFields[j] = message.selectedFields[j]; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - object.rowRestriction = message.rowRestriction; - if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { - object.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options); - if (options.oneofs) - object.outputFormatSerializationOptions = "arrowSerializationOptions"; - } + object.offset = null; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); return object; }; /** - 
* Converts this TableReadOptions to JSON. + * Converts this AppendResult to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult * @instance * @returns {Object.} JSON object */ - TableReadOptions.prototype.toJSON = function toJSON() { + AppendResult.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return TableReadOptions; + return AppendResult; })(); - return ReadSession; + return AppendRowsResponse; })(); - v1.ReadStream = (function() { + v1.GetWriteStreamRequest = (function() { /** - * Properties of a ReadStream. + * Properties of a GetWriteStreamRequest. * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadStream - * @property {string|null} [name] ReadStream name + * @interface IGetWriteStreamRequest + * @property {string|null} [name] GetWriteStreamRequest name */ /** - * Constructs a new ReadStream. + * Constructs a new GetWriteStreamRequest. * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadStream. - * @implements IReadStream + * @classdesc Represents a GetWriteStreamRequest. + * @implements IGetWriteStreamRequest * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set */ - function ReadStream(properties) { + function GetWriteStreamRequest(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4171,35 +5013,35 @@ } /** - * ReadStream name. + * GetWriteStreamRequest name. 
* @member {string} name - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @instance */ - ReadStream.prototype.name = ""; + GetWriteStreamRequest.prototype.name = ""; /** - * Creates a new ReadStream instance using the specified properties. + * Creates a new GetWriteStreamRequest instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream instance + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest instance */ - ReadStream.create = function create(properties) { - return new ReadStream(properties); + GetWriteStreamRequest.create = function create(properties) { + return new GetWriteStreamRequest(properties); }; /** - * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadStream.encode = function encode(message, writer) { + GetWriteStreamRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); if (message.name != null && Object.hasOwnProperty.call(message, "name")) @@ -4208,33 +5050,33 @@ }; /** - * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + GetWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ReadStream message from the specified reader or buffer. + * Decodes a GetWriteStreamRequest message from the specified reader or buffer. 
* @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadStream.decode = function decode(reader, length) { + GetWriteStreamRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -4250,30 +5092,30 @@ }; /** - * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadStream.decodeDelimited = function decodeDelimited(reader) { + GetWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ReadStream message. + * Verifies a GetWriteStreamRequest message. * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ReadStream.verify = function verify(message) { + GetWriteStreamRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; if (message.name != null && message.hasOwnProperty("name")) @@ -4283,32 +5125,32 @@ }; /** - * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest */ - ReadStream.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadStream) + GetWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + var message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); if (object.name != null) message.name = String(object.name); return message; }; /** - * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1.ReadStream} message ReadStream + * @param {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} message GetWriteStreamRequest * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ReadStream.toObject = function toObject(message, options) { + GetWriteStreamRequest.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; @@ -4320,49 +5162,39 @@ }; /** - * Converts this ReadStream to JSON. + * Converts this GetWriteStreamRequest to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest * @instance * @returns {Object.} JSON object */ - ReadStream.prototype.toJSON = function toJSON() { + GetWriteStreamRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ReadStream; + return GetWriteStreamRequest; })(); - return v1; - })(); - - storage.v1beta1 = (function() { - - /** - * Namespace v1beta1. - * @memberof google.cloud.bigquery.storage - * @namespace - */ - var v1beta1 = {}; - - v1beta1.ArrowSchema = (function() { + v1.BatchCommitWriteStreamsRequest = (function() { /** - * Properties of an ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IArrowSchema - * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema + * Properties of a BatchCommitWriteStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IBatchCommitWriteStreamsRequest + * @property {string|null} [parent] BatchCommitWriteStreamsRequest parent + * @property {Array.|null} [writeStreams] BatchCommitWriteStreamsRequest writeStreams */ /** - * Constructs a new ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an ArrowSchema. - * @implements IArrowSchema + * Constructs a new BatchCommitWriteStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BatchCommitWriteStreamsRequest. 
+ * @implements IBatchCommitWriteStreamsRequest * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set */ - function ArrowSchema(properties) { + function BatchCommitWriteStreamsRequest(properties) { + this.writeStreams = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4370,75 +5202,91 @@ } /** - * ArrowSchema serializedSchema. - * @member {Uint8Array} serializedSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * BatchCommitWriteStreamsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @instance */ - ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); + BatchCommitWriteStreamsRequest.prototype.parent = ""; /** - * Creates a new ArrowSchema instance using the specified properties. + * BatchCommitWriteStreamsRequest writeStreams. + * @member {Array.} writeStreams + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @instance + */ + BatchCommitWriteStreamsRequest.prototype.writeStreams = $util.emptyArray; + + /** + * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest instance */ - ArrowSchema.create = function create(properties) { - return new ArrowSchema(properties); + BatchCommitWriteStreamsRequest.create = function create(properties) { + return new BatchCommitWriteStreamsRequest(properties); }; /** - * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ArrowSchema.encode = function encode(message, writer) { + BatchCommitWriteStreamsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.writeStreams != null && message.writeStreams.length) + for (var i = 0; i < message.writeStreams.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.writeStreams[i]); return writer; }; /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { + BatchCommitWriteStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an ArrowSchema message from the specified reader or buffer. + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowSchema.decode = function decode(reader, length) { + BatchCommitWriteStreamsRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.serializedSchema = reader.bytes(); + message.parent = reader.string(); + break; + case 2: + if (!(message.writeStreams && message.writeStreams.length)) + message.writeStreams = []; + message.writeStreams.push(reader.string()); break; default: reader.skipType(tag & 7); @@ -4449,117 +5297,130 @@ }; /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowSchema.decodeDelimited = function decodeDelimited(reader) { + BatchCommitWriteStreamsRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an ArrowSchema message. + * Verifies a BatchCommitWriteStreamsRequest message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ArrowSchema.verify = function verify(message) { + BatchCommitWriteStreamsRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) - return "serializedSchema: buffer expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.writeStreams != null && message.hasOwnProperty("writeStreams")) { + if (!Array.isArray(message.writeStreams)) + return "writeStreams: array expected"; + for (var i = 0; i < message.writeStreams.length; ++i) + if (!$util.isString(message.writeStreams[i])) + return "writeStreams: string[] expected"; + } return null; }; /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest */ - ArrowSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema) + BatchCommitWriteStreamsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); - if (object.serializedSchema != null) - if (typeof object.serializedSchema === "string") - $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); - else if (object.serializedSchema.length) - message.serializedSchema = object.serializedSchema; + var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.writeStreams) { + if (!Array.isArray(object.writeStreams)) + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.writeStreams: array expected"); + message.writeStreams = []; + for (var i = 0; i < object.writeStreams.length; ++i) + message.writeStreams[i] = String(object.writeStreams[i]); + } return message; }; /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ArrowSchema} message ArrowSchema + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ArrowSchema.toObject = function toObject(message, options) { + BatchCommitWriteStreamsRequest.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.writeStreams = []; if (options.defaults) - if (options.bytes === String) - object.serializedSchema = ""; - else { - object.serializedSchema = []; - if (options.bytes !== Array) - object.serializedSchema = $util.newBuffer(object.serializedSchema); - } - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; + object.parent = ""; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.writeStreams && message.writeStreams.length) { + object.writeStreams = []; + for (var j = 0; j < message.writeStreams.length; ++j) + object.writeStreams[j] = message.writeStreams[j]; + } return object; }; /** - * Converts this ArrowSchema to JSON. + * Converts this BatchCommitWriteStreamsRequest to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest * @instance * @returns {Object.} JSON object */ - ArrowSchema.prototype.toJSON = function toJSON() { + BatchCommitWriteStreamsRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ArrowSchema; + return BatchCommitWriteStreamsRequest; })(); - v1beta1.ArrowRecordBatch = (function() { + v1.BatchCommitWriteStreamsResponse = (function() { /** - * Properties of an ArrowRecordBatch. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IArrowRecordBatch - * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch - * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount + * Properties of a BatchCommitWriteStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IBatchCommitWriteStreamsResponse + * @property {google.protobuf.ITimestamp|null} [commitTime] BatchCommitWriteStreamsResponse commitTime + * @property {Array.|null} [streamErrors] BatchCommitWriteStreamsResponse streamErrors */ /** - * Constructs a new ArrowRecordBatch. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an ArrowRecordBatch. - * @implements IArrowRecordBatch + * Constructs a new BatchCommitWriteStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BatchCommitWriteStreamsResponse. 
+ * @implements IBatchCommitWriteStreamsResponse * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set */ - function ArrowRecordBatch(properties) { + function BatchCommitWriteStreamsResponse(properties) { + this.streamErrors = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4567,88 +5428,91 @@ } /** - * ArrowRecordBatch serializedRecordBatch. - * @member {Uint8Array} serializedRecordBatch - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * BatchCommitWriteStreamsResponse commitTime. + * @member {google.protobuf.ITimestamp|null|undefined} commitTime + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @instance */ - ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); + BatchCommitWriteStreamsResponse.prototype.commitTime = null; /** - * ArrowRecordBatch rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * BatchCommitWriteStreamsResponse streamErrors. + * @member {Array.} streamErrors + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @instance */ - ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + BatchCommitWriteStreamsResponse.prototype.streamErrors = $util.emptyArray; /** - * Creates a new ArrowRecordBatch instance using the specified properties. + * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse instance */ - ArrowRecordBatch.create = function create(properties) { - return new ArrowRecordBatch(properties); + BatchCommitWriteStreamsResponse.create = function create(properties) { + return new BatchCommitWriteStreamsResponse(properties); }; /** - * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ArrowRecordBatch.encode = function encode(message, writer) { + BatchCommitWriteStreamsResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) + $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.streamErrors != null && message.streamErrors.length) + for (var i = 0; i < message.streamErrors.length; ++i) + $root.google.cloud.bigquery.storage.v1.StorageError.encode(message.streamErrors[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { + BatchCommitWriteStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowRecordBatch.decode = function decode(reader, length) { + BatchCommitWriteStreamsResponse.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.serializedRecordBatch = reader.bytes(); + message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); break; case 2: - message.rowCount = reader.int64(); + if (!(message.streamErrors && message.streamErrors.length)) + message.streamErrors = []; + message.streamErrors.push($root.google.cloud.bigquery.storage.v1.StorageError.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -4659,139 +5523,138 @@ }; /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { + BatchCommitWriteStreamsResponse.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an ArrowRecordBatch message. 
+ * Verifies a BatchCommitWriteStreamsResponse message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ArrowRecordBatch.verify = function verify(message) { + BatchCommitWriteStreamsResponse.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) - return "serializedRecordBatch: buffer expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; + if (message.commitTime != null && message.hasOwnProperty("commitTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTime); + if (error) + return "commitTime." + error; + } + if (message.streamErrors != null && message.hasOwnProperty("streamErrors")) { + if (!Array.isArray(message.streamErrors)) + return "streamErrors: array expected"; + for (var i = 0; i < message.streamErrors.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.StorageError.verify(message.streamErrors[i]); + if (error) + return "streamErrors." + error; + } + } return null; }; /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse */ - ArrowRecordBatch.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) + BatchCommitWriteStreamsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); - if (object.serializedRecordBatch != null) - if (typeof object.serializedRecordBatch === "string") - $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); - else if (object.serializedRecordBatch.length) - message.serializedRecordBatch = object.serializedRecordBatch; - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); + if (object.commitTime != null) { + if (typeof object.commitTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.commitTime: object expected"); + message.commitTime = 
$root.google.protobuf.Timestamp.fromObject(object.commitTime); + } + if (object.streamErrors) { + if (!Array.isArray(object.streamErrors)) + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: array expected"); + message.streamErrors = []; + for (var i = 0; i < object.streamErrors.length; ++i) { + if (typeof object.streamErrors[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: object expected"); + message.streamErrors[i] = $root.google.cloud.bigquery.storage.v1.StorageError.fromObject(object.streamErrors[i]); + } + } return message; }; /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} message ArrowRecordBatch + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ArrowRecordBatch.toObject = function toObject(message, options) { + BatchCommitWriteStreamsResponse.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedRecordBatch = ""; - else { - object.serializedRecordBatch = []; - if (options.bytes !== Array) - object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); - } - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; + if (options.arrays || options.defaults) + object.streamErrors = []; + if (options.defaults) + object.commitTime = null; + if (message.commitTime != null && message.hasOwnProperty("commitTime")) + object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); + if (message.streamErrors && message.streamErrors.length) { + object.streamErrors = []; + for (var j = 0; j < message.streamErrors.length; ++j) + object.streamErrors[j] = $root.google.cloud.bigquery.storage.v1.StorageError.toObject(message.streamErrors[j], options); } - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; return object; }; /** - * Converts this ArrowRecordBatch to JSON. + * Converts this BatchCommitWriteStreamsResponse to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse * @instance * @returns {Object.} JSON object */ - ArrowRecordBatch.prototype.toJSON = function toJSON() { + BatchCommitWriteStreamsResponse.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ArrowRecordBatch; + return BatchCommitWriteStreamsResponse; })(); - v1beta1.AvroSchema = (function() { + v1.FinalizeWriteStreamRequest = (function() { /** - * Properties of an AvroSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IAvroSchema - * @property {string|null} [schema] AvroSchema schema + * Properties of a FinalizeWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFinalizeWriteStreamRequest + * @property {string|null} [name] FinalizeWriteStreamRequest name */ /** - * Constructs a new AvroSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an AvroSchema. - * @implements IAvroSchema + * Constructs a new FinalizeWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FinalizeWriteStreamRequest. + * @implements IFinalizeWriteStreamRequest * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set */ - function AvroSchema(properties) { + function FinalizeWriteStreamRequest(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4799,75 +5662,75 @@ } /** - * AvroSchema schema. - * @member {string} schema - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * FinalizeWriteStreamRequest name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @instance */ - AvroSchema.prototype.schema = ""; + FinalizeWriteStreamRequest.prototype.name = ""; /** - * Creates a new AvroSchema instance using the specified properties. + * Creates a new FinalizeWriteStreamRequest instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest instance */ - AvroSchema.create = function create(properties) { - return new AvroSchema(properties); + FinalizeWriteStreamRequest.create = function create(properties) { + return new FinalizeWriteStreamRequest(properties); }; /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - AvroSchema.encode = function encode(message, writer) { + FinalizeWriteStreamRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { + FinalizeWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an AvroSchema message from the specified reader or buffer. + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroSchema.decode = function decode(reader, length) { + FinalizeWriteStreamRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.schema = reader.string(); + message.name = reader.string(); break; default: reader.skipType(tag & 7); @@ -4878,108 +5741,107 @@ }; /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroSchema.decodeDelimited = function decodeDelimited(reader) { + FinalizeWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an AvroSchema message. + * Verifies a FinalizeWriteStreamRequest message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - AvroSchema.verify = function verify(message) { + FinalizeWriteStreamRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.schema != null && message.hasOwnProperty("schema")) - if (!$util.isString(message.schema)) - return "schema: string expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; return null; }; /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest */ - AvroSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroSchema) + FinalizeWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); - if (object.schema != null) - message.schema = String(object.schema); + var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); + if (object.name != null) + message.name = String(object.name); return message; }; /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.AvroSchema} message AvroSchema + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} message FinalizeWriteStreamRequest * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - AvroSchema.toObject = function toObject(message, options) { + FinalizeWriteStreamRequest.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) - object.schema = ""; - if (message.schema != null && message.hasOwnProperty("schema")) - object.schema = message.schema; + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; return object; }; /** - * Converts this AvroSchema to JSON. + * Converts this FinalizeWriteStreamRequest to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest * @instance * @returns {Object.} JSON object */ - AvroSchema.prototype.toJSON = function toJSON() { + FinalizeWriteStreamRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return AvroSchema; + return FinalizeWriteStreamRequest; })(); - v1beta1.AvroRows = (function() { + v1.FinalizeWriteStreamResponse = (function() { /** - * Properties of an AvroRows. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IAvroRows - * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows - * @property {number|Long|null} [rowCount] AvroRows rowCount + * Properties of a FinalizeWriteStreamResponse. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IFinalizeWriteStreamResponse + * @property {number|Long|null} [rowCount] FinalizeWriteStreamResponse rowCount */ /** - * Constructs a new AvroRows. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an AvroRows. - * @implements IAvroRows + * Constructs a new FinalizeWriteStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FinalizeWriteStreamResponse. + * @implements IFinalizeWriteStreamResponse * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set */ - function AvroRows(properties) { + function FinalizeWriteStreamResponse(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4987,87 +5849,74 @@ } /** - * AvroRows serializedBinaryRows. - * @member {Uint8Array} serializedBinaryRows - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @instance - */ - AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); - - /** - * AvroRows rowCount. + * FinalizeWriteStreamResponse rowCount. * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @instance */ - AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + FinalizeWriteStreamResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Creates a new AvroRows instance using the specified properties. + * Creates a new FinalizeWriteStreamResponse instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse instance */ - AvroRows.create = function create(properties) { - return new AvroRows(properties); + FinalizeWriteStreamResponse.create = function create(properties) { + return new FinalizeWriteStreamResponse(properties); }; /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - AvroRows.encode = function encode(message, writer) { + FinalizeWriteStreamResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.rowCount); return writer; }; /** - * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - AvroRows.encodeDelimited = function encodeDelimited(message, writer) { + FinalizeWriteStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an AvroRows message from the specified reader or buffer. + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroRows.decode = function decode(reader, length) { + FinalizeWriteStreamResponse.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.serializedBinaryRows = reader.bytes(); - break; - case 2: message.rowCount = reader.int64(); break; default: @@ -5079,35 +5928,32 @@ }; /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroRows.decodeDelimited = function decodeDelimited(reader) { + FinalizeWriteStreamResponse.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an AvroRows message. + * Verifies a FinalizeWriteStreamResponse message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - AvroRows.verify = function verify(message) { + FinalizeWriteStreamResponse.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) - return "serializedBinaryRows: buffer expected"; if (message.rowCount != null && message.hasOwnProperty("rowCount")) if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) return "rowCount: integer|Long expected"; @@ -5115,22 +5961,17 @@ }; /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse */ - AvroRows.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroRows) + FinalizeWriteStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); - if (object.serializedBinaryRows != null) - if (typeof object.serializedBinaryRows === "string") - $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); - else if (object.serializedBinaryRows.length) - message.serializedBinaryRows = object.serializedBinaryRows; + var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); if (object.rowCount != null) if ($util.Long) (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; @@ -5144,34 +5985,24 @@ }; /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. + * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @static - * @param {google.cloud.bigquery.storage.v1beta1.AvroRows} message AvroRows + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} message FinalizeWriteStreamResponse * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - AvroRows.toObject = function toObject(message, options) { + FinalizeWriteStreamResponse.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedBinaryRows = ""; - else { - object.serializedBinaryRows = []; - if (options.bytes !== Array) - object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); - } + if (options.defaults) if ($util.Long) { var long = new $util.Long(0, 0, false); object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else object.rowCount = options.longs === String ? "0" : 0; - } - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; if (message.rowCount != null && message.hasOwnProperty("rowCount")) if (typeof message.rowCount === "number") object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; @@ -5181,39 +6012,38 @@ }; /** - * Converts this AvroRows to JSON. + * Converts this FinalizeWriteStreamResponse to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse * @instance * @returns {Object.} JSON object */ - AvroRows.prototype.toJSON = function toJSON() { + FinalizeWriteStreamResponse.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return AvroRows; + return FinalizeWriteStreamResponse; })(); - v1beta1.TableReadOptions = (function() { + v1.FlushRowsRequest = (function() { /** - * Properties of a TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableReadOptions - * @property {Array.|null} [selectedFields] TableReadOptions selectedFields - * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + * Properties of a FlushRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFlushRowsRequest + * @property {string|null} [writeStream] FlushRowsRequest writeStream + * @property {google.protobuf.IInt64Value|null} [offset] FlushRowsRequest offset */ /** - * Constructs a new TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableReadOptions. - * @implements ITableReadOptions + * Constructs a new FlushRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FlushRowsRequest. + * @implements IFlushRowsRequest * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set */ - function TableReadOptions(properties) { - this.selectedFields = []; + function FlushRowsRequest(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -5221,91 +6051,88 @@ } /** - * TableReadOptions selectedFields. 
- * @member {Array.} selectedFields - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * FlushRowsRequest writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @instance */ - TableReadOptions.prototype.selectedFields = $util.emptyArray; + FlushRowsRequest.prototype.writeStream = ""; /** - * TableReadOptions rowRestriction. - * @member {string} rowRestriction - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * FlushRowsRequest offset. + * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @instance */ - TableReadOptions.prototype.rowRestriction = ""; + FlushRowsRequest.prototype.offset = null; /** - * Creates a new TableReadOptions instance using the specified properties. + * Creates a new FlushRowsRequest instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions instance + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest instance */ - TableReadOptions.create = function create(properties) { - return new TableReadOptions(properties); + FlushRowsRequest.create = function create(properties) { + return new FlushRowsRequest(properties); }; /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * Encodes the specified FlushRowsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReadOptions.encode = function encode(message, writer) { + FlushRowsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.selectedFields != null && message.selectedFields.length) - for (var i = 0; i < message.selectedFields.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + FlushRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a TableReadOptions message from the specified reader or buffer. + * Decodes a FlushRowsRequest message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decode = function decode(reader, length) { + FlushRowsRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); + message.writeStream = reader.string(); break; case 2: - message.rowRestriction = reader.string(); + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -5316,404 +6143,426 @@ }; /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + FlushRowsRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a TableReadOptions message. + * Verifies a FlushRowsRequest message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - TableReadOptions.verify = function verify(message) { + FlushRowsRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { - if (!Array.isArray(message.selectedFields)) - return "selectedFields: array expected"; - for (var i = 0; i < message.selectedFields.length; ++i) - if (!$util.isString(message.selectedFields[i])) - return "selectedFields: string[] expected"; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); + if (error) + return "offset." + error; } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - if (!$util.isString(message.rowRestriction)) - return "rowRestriction: string expected"; return null; }; /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest */ - TableReadOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions) + FlushRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); - if (object.selectedFields) { - if (!Array.isArray(object.selectedFields)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableReadOptions.selectedFields: array expected"); - message.selectedFields = []; - for (var i = 0; i < object.selectedFields.length; ++i) - message.selectedFields[i] = String(object.selectedFields[i]); + var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); + if (object.writeStream != null) + message.writeStream = String(object.writeStream); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.FlushRowsRequest.offset: object expected"); + message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); } - if (object.rowRestriction != null) - message.rowRestriction = String(object.rowRestriction); return message; }; /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} message TableReadOptions + * @param {google.cloud.bigquery.storage.v1.FlushRowsRequest} message FlushRowsRequest * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - TableReadOptions.toObject = function toObject(message, options) { + FlushRowsRequest.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.selectedFields = []; - if (options.defaults) - object.rowRestriction = ""; - if (message.selectedFields && message.selectedFields.length) { - object.selectedFields = []; - for (var j = 0; j < message.selectedFields.length; ++j) - object.selectedFields[j] = message.selectedFields[j]; + if (options.defaults) { + object.writeStream = ""; + object.offset = null; } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - object.rowRestriction = message.rowRestriction; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); return object; }; /** - * Converts this TableReadOptions to JSON. + * Converts this FlushRowsRequest to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest * @instance * @returns {Object.} JSON object */ - TableReadOptions.prototype.toJSON = function toJSON() { + FlushRowsRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return TableReadOptions; + return FlushRowsRequest; })(); - v1beta1.BigQueryStorage = (function() { + v1.FlushRowsResponse = (function() { /** - * Constructs a new BigQueryStorage service. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BigQueryStorage - * @extends $protobuf.rpc.Service + * Properties of a FlushRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFlushRowsResponse + * @property {number|Long|null} [offset] FlushRowsResponse offset + */ + + /** + * Constructs a new FlushRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FlushRowsResponse. + * @implements IFlushRowsResponse * @constructor - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set */ - function BigQueryStorage(rpcImpl, requestDelimited, responseDelimited) { - $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + function FlushRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; } - (BigQueryStorage.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryStorage; + /** + * FlushRowsResponse offset. 
+ * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @instance + */ + FlushRowsResponse.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Creates new BigQueryStorage service using the specified rpc implementation. + * Creates a new FlushRowsResponse instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse * @static - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - * @returns {BigQueryStorage} RPC service. Useful where requests and/or responses are streamed. + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse instance */ - BigQueryStorage.create = function create(rpcImpl, requestDelimited, responseDelimited) { - return new this(rpcImpl, requestDelimited, responseDelimited); + FlushRowsResponse.create = function create(properties) { + return new FlushRowsResponse(properties); }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef CreateReadSessionCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} [response] ReadSession + * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ + FlushRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.offset); + return writer; + }; /** - * Calls CreateReadSession. - * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession - * @returns {undefined} - * @variation 1 + * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - Object.defineProperty(BigQueryStorage.prototype.createReadSession = function createReadSession(request, callback) { - return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadSession, request, callback); - }, "name", { value: "CreateReadSession" }); + FlushRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; /** - * Calls CreateReadSession. - * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @returns {Promise} Promise - * @variation 2 + * Decodes a FlushRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ + FlushRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.offset = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef ReadRowsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} [response] ReadRowsResponse + * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ + FlushRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; /** - * Calls ReadRows. - * @function readRows - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse - * @returns {undefined} - * @variation 1 + * Verifies a FlushRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Object.defineProperty(BigQueryStorage.prototype.readRows = function readRows(request, callback) { - return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, request, callback); - }, "name", { value: "ReadRows" }); + FlushRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; /** - * Calls ReadRows. - * @function readRows - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 + * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse */ + FlushRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef BatchCreateReadSessionStreamsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} [response] BatchCreateReadSessionStreamsResponse + * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} message FlushRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object */ + FlushRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; /** - * Calls BatchCreateReadSessionStreams. - * @function batchCreateReadSessionStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * Converts this FlushRowsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback} callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse - * @returns {undefined} - * @variation 1 + * @returns {Object.} JSON object */ - Object.defineProperty(BigQueryStorage.prototype.batchCreateReadSessionStreams = function batchCreateReadSessionStreams(request, callback) { - return this.rpcCall(batchCreateReadSessionStreams, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, request, callback); - }, "name", { value: "BatchCreateReadSessionStreams" }); + FlushRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Calls BatchCreateReadSessionStreams. - * @function batchCreateReadSessionStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ + return FlushRowsResponse; + })(); + + v1.StorageError = (function() { /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef FinalizeStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.protobuf.Empty} [response] Empty + * Properties of a StorageError. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IStorageError + * @property {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null} [code] StorageError code + * @property {string|null} [entity] StorageError entity + * @property {string|null} [errorMessage] StorageError errorMessage */ /** - * Calls FinalizeStream. - * @function finalizeStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback} callback Node-style callback called with the error, if any, and Empty - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.finalizeStream = function finalizeStream(request, callback) { - return this.rpcCall(finalizeStream, $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, $root.google.protobuf.Empty, request, callback); - }, "name", { value: "FinalizeStream" }); - - /** - * Calls FinalizeStream. - * @function finalizeStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef SplitReadStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} [response] SplitReadStreamResponse + * Constructs a new StorageError. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a StorageError. 
+ * @implements IStorageError + * @constructor + * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set */ + function StorageError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } /** - * Calls SplitReadStream. - * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * StorageError code. + * @member {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode} code + * @memberof google.cloud.bigquery.storage.v1.StorageError * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse - * @returns {undefined} - * @variation 1 */ - Object.defineProperty(BigQueryStorage.prototype.splitReadStream = function splitReadStream(request, callback) { - return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, request, callback); - }, "name", { value: "SplitReadStream" }); + StorageError.prototype.code = 0; /** - * Calls SplitReadStream. - * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * StorageError entity. + * @member {string} entity + * @memberof google.cloud.bigquery.storage.v1.StorageError * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - return BigQueryStorage; - })(); - - v1beta1.Stream = (function() { - - /** - * Properties of a Stream. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStream - * @property {string|null} [name] Stream name - */ - - /** - * Constructs a new Stream. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a Stream. - * @implements IStream - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set */ - function Stream(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + StorageError.prototype.entity = ""; /** - * Stream name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * StorageError errorMessage. + * @member {string} errorMessage + * @memberof google.cloud.bigquery.storage.v1.StorageError * @instance */ - Stream.prototype.name = ""; + StorageError.prototype.errorMessage = ""; /** - * Creates a new Stream instance using the specified properties. + * Creates a new StorageError instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream instance + * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError instance */ - Stream.create = function create(properties) { - return new Stream(properties); + StorageError.create = function create(properties) { + return new StorageError(properties); }; /** - * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * Encodes the specified StorageError message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Stream.encode = function encode(message, writer) { + StorageError.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); + if (message.entity != null && Object.hasOwnProperty.call(message, "entity")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.entity); + if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.errorMessage); return writer; }; /** - * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Stream.encodeDelimited = function encodeDelimited(message, writer) { + StorageError.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a Stream message from the specified reader or buffer. + * Decodes a StorageError message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Stream.decode = function decode(reader, length) { + StorageError.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StorageError(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.name = reader.string(); + message.code = reader.int32(); + break; + case 2: + message.entity = reader.string(); + break; + case 3: + message.errorMessage = reader.string(); break; default: reader.skipType(tag & 7); @@ -5724,108 +6573,219 @@ }; /** - * Decodes a Stream message from the specified reader or buffer, length delimited. + * Decodes a StorageError message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Stream.decodeDelimited = function decodeDelimited(reader) { + StorageError.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a Stream message. + * Verifies a StorageError message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Stream.verify = function verify(message) { + StorageError.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; + if (message.code != null && message.hasOwnProperty("code")) + switch (message.code) { + default: + return "code: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + break; + } + if (message.entity != null && message.hasOwnProperty("entity")) + if (!$util.isString(message.entity)) + return "entity: string expected"; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + if (!$util.isString(message.errorMessage)) + return "errorMessage: string expected"; return null; }; /** - * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * Creates a StorageError message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError */ - Stream.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Stream) + StorageError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StorageError) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); - if (object.name != null) - message.name = String(object.name); + var message = new $root.google.cloud.bigquery.storage.v1.StorageError(); + switch (object.code) { + case "STORAGE_ERROR_CODE_UNSPECIFIED": + case 0: + message.code = 0; + break; + case "TABLE_NOT_FOUND": + case 1: + message.code = 1; + break; + case "STREAM_ALREADY_COMMITTED": + case 2: + message.code = 2; + break; + case "STREAM_NOT_FOUND": + case 3: + message.code = 3; + break; + case "INVALID_STREAM_TYPE": + case 4: + message.code = 4; + break; + case "INVALID_STREAM_STATE": + case 5: + message.code = 5; + break; + case "STREAM_FINALIZED": + case 6: + message.code = 6; + break; + case "SCHEMA_MISMATCH_EXTRA_FIELDS": + case 7: + message.code = 7; + break; + } + if (object.entity != null) + message.entity = String(object.entity); + if (object.errorMessage != null) + message.errorMessage = String(object.errorMessage); return message; }; /** - * Creates a plain object from a Stream message. Also converts values to other types if specified. + * Creates a plain object from a StorageError message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @static - * @param {google.cloud.bigquery.storage.v1beta1.Stream} message Stream + * @param {google.cloud.bigquery.storage.v1.StorageError} message StorageError * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - Stream.toObject = function toObject(message, options) { + StorageError.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) - object.name = ""; - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; + if (options.defaults) { + object.code = options.enums === String ? "STORAGE_ERROR_CODE_UNSPECIFIED" : 0; + object.entity = ""; + object.errorMessage = ""; + } + if (message.code != null && message.hasOwnProperty("code")) + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] : message.code; + if (message.entity != null && message.hasOwnProperty("entity")) + object.entity = message.entity; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + object.errorMessage = message.errorMessage; return object; }; /** - * Converts this Stream to JSON. + * Converts this StorageError to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @memberof google.cloud.bigquery.storage.v1.StorageError * @instance * @returns {Object.} JSON object */ - Stream.prototype.toJSON = function toJSON() { + StorageError.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return Stream; + /** + * StorageErrorCode enum. 
+ * @name google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode + * @enum {number} + * @property {number} STORAGE_ERROR_CODE_UNSPECIFIED=0 STORAGE_ERROR_CODE_UNSPECIFIED value + * @property {number} TABLE_NOT_FOUND=1 TABLE_NOT_FOUND value + * @property {number} STREAM_ALREADY_COMMITTED=2 STREAM_ALREADY_COMMITTED value + * @property {number} STREAM_NOT_FOUND=3 STREAM_NOT_FOUND value + * @property {number} INVALID_STREAM_TYPE=4 INVALID_STREAM_TYPE value + * @property {number} INVALID_STREAM_STATE=5 INVALID_STREAM_STATE value + * @property {number} STREAM_FINALIZED=6 STREAM_FINALIZED value + * @property {number} SCHEMA_MISMATCH_EXTRA_FIELDS=7 SCHEMA_MISMATCH_EXTRA_FIELDS value + */ + StorageError.StorageErrorCode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STORAGE_ERROR_CODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "TABLE_NOT_FOUND"] = 1; + values[valuesById[2] = "STREAM_ALREADY_COMMITTED"] = 2; + values[valuesById[3] = "STREAM_NOT_FOUND"] = 3; + values[valuesById[4] = "INVALID_STREAM_TYPE"] = 4; + values[valuesById[5] = "INVALID_STREAM_STATE"] = 5; + values[valuesById[6] = "STREAM_FINALIZED"] = 6; + values[valuesById[7] = "SCHEMA_MISMATCH_EXTRA_FIELDS"] = 7; + return values; + })(); + + return StorageError; })(); - v1beta1.StreamPosition = (function() { + /** + * DataFormat enum. + * @name google.cloud.bigquery.storage.v1.DataFormat + * @enum {number} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=2 ARROW value + */ + v1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[2] = "ARROW"] = 2; + return values; + })(); + + v1.ReadSession = (function() { /** - * Properties of a StreamPosition. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStreamPosition - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] StreamPosition stream - * @property {number|Long|null} [offset] StreamPosition offset + * Properties of a ReadSession. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1.DataFormat|null} [dataFormat] ReadSession dataFormat + * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {string|null} [table] ReadSession table + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions + * @property {Array.|null} [streams] ReadSession streams + * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned */ /** - * Constructs a new StreamPosition. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a StreamPosition. - * @implements IStreamPosition + * Constructs a new ReadSession. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadSession. 
+ * @implements IReadSession * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set */ - function StreamPosition(properties) { + function ReadSession(properties) { + this.streams = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -5833,88 +6793,209 @@ } /** - * StreamPosition stream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @instance */ - StreamPosition.prototype.stream = null; + ReadSession.prototype.name = ""; /** - * StreamPosition offset. - * @member {number|Long} offset - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @instance */ - StreamPosition.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + ReadSession.prototype.expireTime = null; /** - * Creates a new StreamPosition instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition instance + * ReadSession dataFormat. 
+ * @member {google.cloud.bigquery.storage.v1.DataFormat} dataFormat + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance */ - StreamPosition.create = function create(properties) { - return new StreamPosition(properties); - }; + ReadSession.prototype.dataFormat = 0; /** - * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamPosition.encode = function encode(message, writer) { + * ReadSession avroSchema. + * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession table. + * @member {string} table + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.table = ""; + + /** + * ReadSession tableModifiers. + * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession readOptions. 
+ * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.readOptions = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + /** + * ReadSession estimatedTotalBytesScanned. + * @member {number|Long} estimatedTotalBytesScanned + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession instance + */ + ReadSession.create = function create(properties) { + return new ReadSession(properties); + }; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.table != null && Object.hasOwnProperty.call(message, "table")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + 
$root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) + $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) + writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); return writer; }; /** - * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - StreamPosition.encodeDelimited = function encodeDelimited(message, writer) { + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a StreamPosition message from the specified reader or buffer. 
+ * Decodes a ReadSession message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamPosition.decode = function decode(reader, length) { + ReadSession.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + message.name = reader.string(); break; case 2: - message.offset = reader.int64(); + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 3: + message.dataFormat = reader.int32(); + break; + case 4: + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + case 5: + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + case 6: + message.table = reader.string(); + break; + case 7: + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); + break; + case 8: + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); + break; + case 10: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); + break; + case 12: + message.estimatedTotalBytesScanned = reader.int64(); break; default: reader.skipType(tag & 7); @@ -5925,575 +7006,733 @@ }; /** - * Decodes a StreamPosition message from the specified reader or buffer, length delimited. + * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamPosition.decodeDelimited = function decodeDelimited(reader) { + ReadSession.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a StreamPosition message. + * Verifies a ReadSession message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - StreamPosition.verify = function verify(message) { + ReadSession.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.stream != null && message.hasOwnProperty("stream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); if (error) - return "stream." + error; + return "expireTime." 
+ error; } - if (message.offset != null && message.hasOwnProperty("offset")) - if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) - return "offset: integer|Long expected"; + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + switch (message.dataFormat) { + default: + return "dataFormat: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + if (message.table != null && message.hasOwnProperty("table")) + if (!$util.isString(message.table)) + return "table: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.streams[i]); + if (error) + return "streams." 
+ error; + } + } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) + return "estimatedTotalBytesScanned: integer|Long expected"; return null; }; /** - * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession */ - StreamPosition.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamPosition) + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); - if (object.stream != null) { - if (typeof object.stream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamPosition.stream: object expected"); - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.expireTime: object expected"); + message.expireTime = 
$root.google.protobuf.Timestamp.fromObject(object.expireTime); } - if (object.offset != null) + switch (object.dataFormat) { + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.dataFormat = 0; + break; + case "AVRO": + case 1: + message.dataFormat = 1; + break; + case "ARROW": + case 2: + message.dataFormat = 2; + break; + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.table != null) + message.table = String(object.table); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.fromObject(object.tableModifiers); + } + if (object.readOptions != null) { + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.fromObject(object.readOptions); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: object expected"); + message.streams[i] = 
$root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); + } + } + if (object.estimatedTotalBytesScanned != null) if ($util.Long) - (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; - else if (typeof object.offset === "string") - message.offset = parseInt(object.offset, 10); - else if (typeof object.offset === "number") - message.offset = object.offset; - else if (typeof object.offset === "object") - message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + (message.estimatedTotalBytesScanned = $util.Long.fromValue(object.estimatedTotalBytesScanned)).unsigned = false; + else if (typeof object.estimatedTotalBytesScanned === "string") + message.estimatedTotalBytesScanned = parseInt(object.estimatedTotalBytesScanned, 10); + else if (typeof object.estimatedTotalBytesScanned === "number") + message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; + else if (typeof object.estimatedTotalBytesScanned === "object") + message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); return message; }; /** - * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} message StreamPosition + * @param {google.cloud.bigquery.storage.v1.ReadSession} message ReadSession * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - StreamPosition.toObject = function toObject(message, options) { + ReadSession.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.streams = []; if (options.defaults) { - object.stream = null; + object.name = ""; + object.expireTime = null; + object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; + object.table = ""; + object.tableModifiers = null; + object.readOptions = null; if ($util.Long) { var long = new $util.Long(0, 0, false); - object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else - object.offset = options.longs === String ? "0" : 0; + object.estimatedTotalBytesScanned = options.longs === String ? "0" : 0; } - if (message.stream != null && message.hasOwnProperty("stream")) - object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); - if (message.offset != null && message.hasOwnProperty("offset")) - if (typeof message.offset === "number") - object.offset = options.longs === String ? 
String(message.offset) : message.offset; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.table != null && message.hasOwnProperty("table")) + object.table = message.table; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.toObject(message.tableModifiers, options); + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.toObject(message.readOptions, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); + } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (typeof message.estimatedTotalBytesScanned === "number") + object.estimatedTotalBytesScanned = options.longs === 
String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; else - object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; return object; }; /** - * Converts this StreamPosition to JSON. + * Converts this ReadSession to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @memberof google.cloud.bigquery.storage.v1.ReadSession * @instance * @returns {Object.} JSON object */ - StreamPosition.prototype.toJSON = function toJSON() { + ReadSession.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return StreamPosition; - })(); + ReadSession.TableModifiers = (function() { - v1beta1.ReadSession = (function() { + /** + * Properties of a TableModifiers. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ - /** - * Properties of a ReadSession. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadSession - * @property {string|null} [name] ReadSession name - * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime - * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadSession avroSchema - * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema - * @property {Array.|null} [streams] ReadSession streams - * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] ReadSession tableReference - * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers - * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] ReadSession shardingStrategy - */ + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableModifiers. + * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Constructs a new ReadSession. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadSession. - * @implements IReadSession - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set - */ - function ReadSession(properties) { - this.streams = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + /** + * TableModifiers snapshotTime. 
+ * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; - /** - * ReadSession name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.name = ""; + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; - /** - * ReadSession expireTime. - * @member {google.protobuf.ITimestamp|null|undefined} expireTime - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.expireTime = null; + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; - /** - * ReadSession avroSchema. 
- * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.avroSchema = null; + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; - /** - * ReadSession arrowSchema. - * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.arrowSchema = null; + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; - /** - * ReadSession streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.streams = $util.emptyArray; + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * ReadSession tableReference. - * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.tableReference = null; + /** + * Verifies a TableModifiers message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; - /** - * ReadSession tableModifiers. - * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.tableModifiers = null; + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; - /** - * ReadSession shardingStrategy. 
- * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.shardingStrategy = 0; + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; - // OneOf field names bound to virtual getters and setters - var $oneOfFields; + /** + * Converts this TableModifiers to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * ReadSession schema. - * @member {"avroSchema"|"arrowSchema"|undefined} schema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - Object.defineProperty(ReadSession.prototype, "schema", { - get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), - set: $util.oneOfSetter($oneOfFields) - }); + return TableModifiers; + })(); - /** - * Creates a new ReadSession instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession instance - */ - ReadSession.create = function create(properties) { - return new ReadSession(properties); - }; + ReadSession.TableReadOptions = (function() { - /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) - $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) - $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) - 
$root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) - $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); - return writer; - }; + /** + * Properties of a TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions + */ - /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * Constructs a new TableReadOptions. 
+ * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableReadOptions. + * @implements ITableReadOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + */ + function TableReadOptions(properties) { + this.selectedFields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Decodes a ReadSession message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 5: - message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); - break; - case 6: - message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); - break; - case 4: - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); - break; - case 7: - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - case 8: - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); - break; - case 9: - message.shardingStrategy = reader.int32(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; + /** + * TableReadOptions selectedFields. + * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.selectedFields = $util.emptyArray; - /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * TableReadOptions rowRestriction. + * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; - /** - * Verifies a ReadSession message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadSession.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.expireTime); - if (error) - return "expireTime." + error; - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); - if (error) - return "avroSchema." + error; + /** + * TableReadOptions arrowSerializationOptions. 
+ * @member {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null|undefined} arrowSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.arrowSerializationOptions = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * TableReadOptions outputFormatSerializationOptions. + * @member {"arrowSerializationOptions"|undefined} outputFormatSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { + get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions instance + */ + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); + }; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) + $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + case 2: + message.rowRestriction = reader.string(); + break; + case 3: + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } } - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - if (properties.schema === 1) - return "schema: multiple values"; - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); - if (error) - return "arrowSchema." + error; + return message; + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReadOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReadOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; } - } - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); - if (error) - return "streams." 
+ error; + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + properties.outputFormatSerializationOptions = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify(message.arrowSerializationOptions); + if (error) + return "arrowSerializationOptions." + error; + } } - } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); - if (error) - return "tableReference." + error; - } - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); - if (error) - return "tableModifiers." + error; - } - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - switch (message.shardingStrategy) { - default: - return "shardingStrategy: enum value expected"; - case 0: - case 1: - case 2: - break; + return null; + }; + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + */ + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); } - return null; - }; + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); + if (object.arrowSerializationOptions != null) { + if (typeof object.arrowSerializationOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); + } + return message; + }; - /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - */ - ReadSession.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadSession) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); - if (object.name != null) - message.name = String(object.name); - if (object.expireTime != null) { - if (typeof object.expireTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.expireTime: object expected"); - message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); - } - if (object.avroSchema != null) { - if (typeof object.avroSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.avroSchema: object expected"); - message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); - } - if (object.arrowSchema != null) { - if (typeof object.arrowSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.arrowSchema: object expected"); - message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); - } - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} message TableReadOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReadOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; } - } - if (object.tableReference != null) { - if (typeof object.tableReference !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableReference: object expected"); - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); - } - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableModifiers: object expected"); - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); - } - switch (object.shardingStrategy) { - case "SHARDING_STRATEGY_UNSPECIFIED": - case 0: - message.shardingStrategy = 0; - break; - case "LIQUID": - case 1: - message.shardingStrategy = 1; - break; - case "BALANCED": - case 2: - message.shardingStrategy = 2; - break; - } - return message; - }; + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + object.arrowSerializationOptions = 
$root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options); + if (options.oneofs) + object.outputFormatSerializationOptions = "arrowSerializationOptions"; + } + return object; + }; - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} message ReadSession - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadSession.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streams = []; - if (options.defaults) { - object.name = ""; - object.expireTime = null; - object.tableReference = null; - object.tableModifiers = null; - object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) - object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); - if (message.streams && message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); - if (options.oneofs) - object.schema = "avroSchema"; - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); - if (options.oneofs) - object.schema = "arrowSchema"; - } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) - object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; - return object; - }; + /** + * Converts this TableReadOptions to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + * @returns {Object.} JSON object + */ + TableReadOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this ReadSession to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - * @returns {Object.} JSON object - */ - ReadSession.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + return TableReadOptions; + })(); return ReadSession; })(); - v1beta1.CreateReadSessionRequest = (function() { + v1.ReadStream = (function() { /** - * Properties of a CreateReadSessionRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ICreateReadSessionRequest - * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] CreateReadSessionRequest tableReference - * @property {string|null} [parent] CreateReadSessionRequest parent - * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] CreateReadSessionRequest tableModifiers - * @property {number|null} [requestedStreams] CreateReadSessionRequest requestedStreams - * @property {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null} [readOptions] CreateReadSessionRequest readOptions - * @property {google.cloud.bigquery.storage.v1beta1.DataFormat|null} [format] CreateReadSessionRequest format - * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] CreateReadSessionRequest shardingStrategy + * Properties of a ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadStream + * @property {string|null} [name] ReadStream name */ /** - * Constructs a new CreateReadSessionRequest. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a CreateReadSessionRequest. - * @implements ICreateReadSessionRequest + * Constructs a new ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadStream. + * @implements IReadStream * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set */ - function CreateReadSessionRequest(properties) { + function ReadStream(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -6501,153 +7740,318 @@ } /** - * CreateReadSessionRequest tableReference. - * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * ReadStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadStream * @instance */ - CreateReadSessionRequest.prototype.tableReference = null; + ReadStream.prototype.name = ""; /** - * CreateReadSessionRequest parent. - * @member {string} parent - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance + * Creates a new ReadStream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream instance */ - CreateReadSessionRequest.prototype.parent = ""; + ReadStream.create = function create(properties) { + return new ReadStream(properties); + }; /** - * CreateReadSessionRequest tableModifiers. 
- * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadStream message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + */ + ReadStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.ReadStream} message ReadStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this ReadStream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadStream * @instance + * @returns {Object.} JSON object */ - CreateReadSessionRequest.prototype.tableModifiers = null; + ReadStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadStream; + })(); + + v1.WriteStream = (function() { /** - * CreateReadSessionRequest requestedStreams. - * @member {number} requestedStreams - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * Properties of a WriteStream. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IWriteStream + * @property {string|null} [name] WriteStream name + * @property {google.cloud.bigquery.storage.v1.WriteStream.Type|null} [type] WriteStream type + * @property {google.protobuf.ITimestamp|null} [createTime] WriteStream createTime + * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime + * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [tableSchema] WriteStream tableSchema + */ + + /** + * Constructs a new WriteStream. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a WriteStream. + * @implements IWriteStream + * @constructor + * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set + */ + function WriteStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * WriteStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @instance */ - CreateReadSessionRequest.prototype.requestedStreams = 0; + WriteStream.prototype.name = ""; /** - * CreateReadSessionRequest readOptions. 
- * @member {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null|undefined} readOptions - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * WriteStream type. + * @member {google.cloud.bigquery.storage.v1.WriteStream.Type} type + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @instance */ - CreateReadSessionRequest.prototype.readOptions = null; + WriteStream.prototype.type = 0; /** - * CreateReadSessionRequest format. - * @member {google.cloud.bigquery.storage.v1beta1.DataFormat} format - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * WriteStream createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @instance */ - CreateReadSessionRequest.prototype.format = 0; + WriteStream.prototype.createTime = null; /** - * CreateReadSessionRequest shardingStrategy. - * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * WriteStream commitTime. + * @member {google.protobuf.ITimestamp|null|undefined} commitTime + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @instance */ - CreateReadSessionRequest.prototype.shardingStrategy = 0; + WriteStream.prototype.commitTime = null; /** - * Creates a new CreateReadSessionRequest instance using the specified properties. + * WriteStream tableSchema. + * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} tableSchema + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.tableSchema = null; + + /** + * Creates a new WriteStream instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest instance + * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream instance */ - CreateReadSessionRequest.create = function create(properties) { - return new CreateReadSessionRequest(properties); + WriteStream.create = function create(properties) { + return new WriteStream(properties); }; /** - * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - CreateReadSessionRequest.encode = function encode(message, writer) { + WriteStream.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) - $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); - if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) - $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.format != null && Object.hasOwnProperty.call(message, "format")) - writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); - if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); - if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) - 
writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) + $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.tableSchema != null && Object.hasOwnProperty.call(message, "tableSchema")) + $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; /** - * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { + WriteStream.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * Decodes a WriteStream message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateReadSessionRequest.decode = function decode(reader, length) { + WriteStream.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - case 6: - message.parent = reader.string(); + message.name = reader.string(); break; case 2: - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + message.type = reader.int32(); break; case 3: - message.requestedStreams = reader.int32(); + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); break; case 4: - message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); + message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); break; case 5: - message.format = reader.int32(); - break; - case 7: - message.shardingStrategy = reader.int32(); + message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -6658,239 +8062,197 @@ }; /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * Decodes a WriteStream message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { + WriteStream.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a CreateReadSessionRequest message. + * Verifies a WriteStream message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - CreateReadSessionRequest.verify = function verify(message) { + WriteStream.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.tableReference != null && message.hasOwnProperty("tableReference")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + var 
error = $root.google.protobuf.Timestamp.verify(message.createTime); if (error) - return "tableReference." + error; + return "createTime." + error; } - if (message.parent != null && message.hasOwnProperty("parent")) - if (!$util.isString(message.parent)) - return "parent: string expected"; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (message.commitTime != null && message.hasOwnProperty("commitTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTime); if (error) - return "tableModifiers." + error; + return "commitTime." + error; } - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - if (!$util.isInteger(message.requestedStreams)) - return "requestedStreams: integer expected"; - if (message.readOptions != null && message.hasOwnProperty("readOptions")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify(message.readOptions); + if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.tableSchema); if (error) - return "readOptions." + error; + return "tableSchema." + error; } - if (message.format != null && message.hasOwnProperty("format")) - switch (message.format) { - default: - return "format: enum value expected"; - case 0: - case 1: - case 3: - break; - } - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - switch (message.shardingStrategy) { - default: - return "shardingStrategy: enum value expected"; - case 0: - case 1: - case 2: - break; - } return null; }; /** - * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream */ - CreateReadSessionRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) + WriteStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.WriteStream) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); - if (object.tableReference != null) { - if (typeof object.tableReference !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableReference: object expected"); - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); - } - if (object.parent != null) - message.parent = String(object.parent); - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableModifiers: object expected"); - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); - } - if (object.requestedStreams != null) - message.requestedStreams = object.requestedStreams | 0; - if (object.readOptions != null) { - if (typeof object.readOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.readOptions: object expected"); - message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); - } - switch (object.format) { - case "DATA_FORMAT_UNSPECIFIED": 
+ var message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); + if (object.name != null) + message.name = String(object.name); + switch (object.type) { + case "TYPE_UNSPECIFIED": case 0: - message.format = 0; + message.type = 0; break; - case "AVRO": + case "COMMITTED": case 1: - message.format = 1; + message.type = 1; break; - case "ARROW": + case "PENDING": + case 2: + message.type = 2; + break; + case "BUFFERED": case 3: - message.format = 3; + message.type = 3; break; } - switch (object.shardingStrategy) { - case "SHARDING_STRATEGY_UNSPECIFIED": - case 0: - message.shardingStrategy = 0; - break; - case "LIQUID": - case 1: - message.shardingStrategy = 1; - break; - case "BALANCED": - case 2: - message.shardingStrategy = 2; - break; + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.commitTime != null) { + if (typeof object.commitTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.commitTime: object expected"); + message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime); + } + if (object.tableSchema != null) { + if (typeof object.tableSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.tableSchema: object expected"); + message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.tableSchema); } return message; }; /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * Creates a plain object from a WriteStream message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @static - * @param {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} message CreateReadSessionRequest + * @param {google.cloud.bigquery.storage.v1.WriteStream} message WriteStream * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - CreateReadSessionRequest.toObject = function toObject(message, options) { + WriteStream.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - object.tableReference = null; - object.tableModifiers = null; - object.requestedStreams = 0; - object.readOptions = null; - object.format = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; - object.parent = ""; - object.shardingStrategy = options.enums === String ? "SHARDING_STRATEGY_UNSPECIFIED" : 0; + object.name = ""; + object.type = options.enums === String ? 
"TYPE_UNSPECIFIED" : 0; + object.createTime = null; + object.commitTime = null; + object.tableSchema = null; } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) - object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - object.requestedStreams = message.requestedStreams; - if (message.readOptions != null && message.hasOwnProperty("readOptions")) - object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); - if (message.format != null && message.hasOwnProperty("format")) - object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; - if (message.parent != null && message.hasOwnProperty("parent")) - object.parent = message.parent; - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? 
$root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] : message.type; + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.commitTime != null && message.hasOwnProperty("commitTime")) + object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); + if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) + object.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.tableSchema, options); return object; }; /** - * Converts this CreateReadSessionRequest to JSON. + * Converts this WriteStream to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @memberof google.cloud.bigquery.storage.v1.WriteStream * @instance * @returns {Object.} JSON object */ - CreateReadSessionRequest.prototype.toJSON = function toJSON() { + WriteStream.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return CreateReadSessionRequest; - })(); - - /** - * DataFormat enum. - * @name google.cloud.bigquery.storage.v1beta1.DataFormat - * @enum {number} - * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value - * @property {number} AVRO=1 AVRO value - * @property {number} ARROW=3 ARROW value - */ - v1beta1.DataFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; - values[valuesById[1] = "AVRO"] = 1; - values[valuesById[3] = "ARROW"] = 3; - return values; - })(); + /** + * Type enum. 
+ * @name google.cloud.bigquery.storage.v1.WriteStream.Type + * @enum {number} + * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value + * @property {number} COMMITTED=1 COMMITTED value + * @property {number} PENDING=2 PENDING value + * @property {number} BUFFERED=3 BUFFERED value + */ + WriteStream.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; + values[valuesById[1] = "COMMITTED"] = 1; + values[valuesById[2] = "PENDING"] = 2; + values[valuesById[3] = "BUFFERED"] = 3; + return values; + })(); - /** - * ShardingStrategy enum. - * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy - * @enum {number} - * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value - * @property {number} LIQUID=1 LIQUID value - * @property {number} BALANCED=2 BALANCED value - */ - v1beta1.ShardingStrategy = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; - values[valuesById[1] = "LIQUID"] = 1; - values[valuesById[2] = "BALANCED"] = 2; - return values; + return WriteStream; })(); - v1beta1.ReadRowsRequest = (function() { + v1.TableSchema = (function() { /** - * Properties of a ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadRowsRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null} [readPosition] ReadRowsRequest readPosition + * Properties of a TableSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ITableSchema + * @property {Array.|null} [fields] TableSchema fields */ /** - * Constructs a new ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadRowsRequest. - * @implements IReadRowsRequest + * Constructs a new TableSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a TableSchema. 
+ * @implements ITableSchema * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set */ - function ReadRowsRequest(properties) { + function TableSchema(properties) { + this.fields = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -6898,75 +8260,78 @@ } /** - * ReadRowsRequest readPosition. - * @member {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null|undefined} readPosition - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * TableSchema fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @instance */ - ReadRowsRequest.prototype.readPosition = null; + TableSchema.prototype.fields = $util.emptyArray; /** - * Creates a new ReadRowsRequest instance using the specified properties. + * Creates a new TableSchema instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest instance + * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema instance */ - ReadRowsRequest.create = function create(properties) { - return new ReadRowsRequest(properties); + TableSchema.create = function create(properties) { + return new TableSchema(properties); }; /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * Encodes the specified TableSchema message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadRowsRequest.encode = function encode(message, writer) { + TableSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.readPosition != null && Object.hasOwnProperty.call(message, "readPosition")) - $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + $root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + TableSchema.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. + * Decodes a TableSchema message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsRequest.decode = function decode(reader, length) { + TableSchema.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -6977,115 +8342,132 @@ }; /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * Decodes a TableSchema message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + TableSchema.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ReadRowsRequest message. + * Verifies a TableSchema message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ReadRowsRequest.verify = function verify(message) { + TableSchema.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.readPosition != null && message.hasOwnProperty("readPosition")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.verify(message.readPosition); - if (error) - return "readPosition." + error; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); + if (error) + return "fields." + error; + } } return null; }; /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * Creates a TableSchema message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema */ - ReadRowsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) + TableSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.TableSchema) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); - if (object.readPosition != null) { - if (typeof object.readPosition !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.readPosition: object expected"); - message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.fromObject(object.readPosition); + var message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); + if (object.fields) { + if (!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: object expected"); + message.fields[i] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); + } } return message; }; /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * Creates a plain object from a TableSchema message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} message ReadRowsRequest + * @param {google.cloud.bigquery.storage.v1.TableSchema} message TableSchema * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ReadRowsRequest.toObject = function toObject(message, options) { + TableSchema.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) - object.readPosition = null; - if (message.readPosition != null && message.hasOwnProperty("readPosition")) - object.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.toObject(message.readPosition, options); + if (options.arrays || options.defaults) + object.fields = []; + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); + } return object; }; /** - * Converts this ReadRowsRequest to JSON. + * Converts this TableSchema to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @memberof google.cloud.bigquery.storage.v1.TableSchema * @instance * @returns {Object.} JSON object */ - ReadRowsRequest.prototype.toJSON = function toJSON() { + TableSchema.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ReadRowsRequest; + return TableSchema; })(); - v1beta1.StreamStatus = (function() { + v1.TableFieldSchema = (function() { /** - * Properties of a StreamStatus. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStreamStatus - * @property {number|Long|null} [estimatedRowCount] StreamStatus estimatedRowCount - * @property {number|null} [fractionConsumed] StreamStatus fractionConsumed - * @property {google.cloud.bigquery.storage.v1beta1.IProgress|null} [progress] StreamStatus progress - * @property {boolean|null} [isSplittable] StreamStatus isSplittable + * Properties of a TableFieldSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ITableFieldSchema + * @property {string|null} [name] TableFieldSchema name + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null} [type] TableFieldSchema type + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null} [mode] TableFieldSchema mode + * @property {Array.|null} [fields] TableFieldSchema fields + * @property {string|null} [description] TableFieldSchema description + * @property {number|Long|null} [maxLength] TableFieldSchema maxLength + * @property {number|Long|null} [precision] TableFieldSchema precision + * @property {number|Long|null} [scale] TableFieldSchema scale */ /** - * Constructs a new StreamStatus. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a StreamStatus. - * @implements IStreamStatus + * Constructs a new TableFieldSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a TableFieldSchema. + * @implements ITableFieldSchema * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set */ - function StreamStatus(properties) { + function TableFieldSchema(properties) { + this.fields = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -7093,114 +8475,169 @@ } /** - * StreamStatus estimatedRowCount. 
- * @member {number|Long} estimatedRowCount - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * TableFieldSchema name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @instance */ - StreamStatus.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + TableFieldSchema.prototype.name = ""; /** - * StreamStatus fractionConsumed. - * @member {number} fractionConsumed - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * TableFieldSchema type. + * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Type} type + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @instance */ - StreamStatus.prototype.fractionConsumed = 0; + TableFieldSchema.prototype.type = 0; /** - * StreamStatus progress. - * @member {google.cloud.bigquery.storage.v1beta1.IProgress|null|undefined} progress - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * TableFieldSchema mode. + * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode} mode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @instance */ - StreamStatus.prototype.progress = null; + TableFieldSchema.prototype.mode = 0; /** - * StreamStatus isSplittable. - * @member {boolean} isSplittable - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * TableFieldSchema fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @instance */ - StreamStatus.prototype.isSplittable = false; + TableFieldSchema.prototype.fields = $util.emptyArray; /** - * Creates a new StreamStatus instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * TableFieldSchema description. + * @member {string} description + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.description = ""; + + /** + * TableFieldSchema maxLength. 
+ * @member {number|Long} maxLength + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.maxLength = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * TableFieldSchema precision. + * @member {number|Long} precision + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.precision = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * TableFieldSchema scale. + * @member {number|Long} scale + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.scale = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new TableFieldSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus instance + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema instance */ - StreamStatus.create = function create(properties) { - return new StreamStatus(properties); + TableFieldSchema.create = function create(properties) { + return new TableFieldSchema(properties); }; /** - * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - StreamStatus.encode = function encode(message, writer) { + TableFieldSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); - if (message.fractionConsumed != null && Object.hasOwnProperty.call(message, "fractionConsumed")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); - if (message.isSplittable != null && Object.hasOwnProperty.call(message, "isSplittable")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); - if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) - $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); + if (message.mode != null && Object.hasOwnProperty.call(message, "mode")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.mode); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + 
$root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.description != null && Object.hasOwnProperty.call(message, "description")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.description); + if (message.maxLength != null && Object.hasOwnProperty.call(message, "maxLength")) + writer.uint32(/* id 7, wireType 0 =*/56).int64(message.maxLength); + if (message.precision != null && Object.hasOwnProperty.call(message, "precision")) + writer.uint32(/* id 8, wireType 0 =*/64).int64(message.precision); + if (message.scale != null && Object.hasOwnProperty.call(message, "scale")) + writer.uint32(/* id 9, wireType 0 =*/72).int64(message.scale); return writer; }; /** - * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - StreamStatus.encodeDelimited = function encodeDelimited(message, writer) { + TableFieldSchema.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a StreamStatus message from the specified reader or buffer. + * Decodes a TableFieldSchema message from the specified reader or buffer. 
* @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamStatus.decode = function decode(reader, length) { + TableFieldSchema.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.estimatedRowCount = reader.int64(); + message.name = reader.string(); break; case 2: - message.fractionConsumed = reader.float(); + message.type = reader.int32(); + break; + case 3: + message.mode = reader.int32(); break; case 4: - message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); break; - case 3: - message.isSplittable = reader.bool(); + case 6: + message.description = reader.string(); + break; + case 7: + message.maxLength = reader.int64(); + break; + case 8: + message.precision = reader.int64(); + break; + case 9: + message.scale = reader.int64(); break; default: reader.skipType(tag & 
7); @@ -7211,152 +8648,402 @@ }; /** - * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamStatus.decodeDelimited = function decodeDelimited(reader) { + TableFieldSchema.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a StreamStatus message. + * Verifies a TableFieldSchema message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - StreamStatus.verify = function verify(message) { + TableFieldSchema.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) - if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) - return "estimatedRowCount: integer|Long expected"; - if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) - if (typeof message.fractionConsumed !== "number") - return "fractionConsumed: number expected"; - if (message.progress != null && message.hasOwnProperty("progress")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Progress.verify(message.progress); - if (error) - return "progress." 
+ error; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + break; + } + if (message.mode != null && message.hasOwnProperty("mode")) + switch (message.mode) { + default: + return "mode: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); + if (error) + return "fields." + error; + } } - if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) - if (typeof message.isSplittable !== "boolean") - return "isSplittable: boolean expected"; + if (message.description != null && message.hasOwnProperty("description")) + if (!$util.isString(message.description)) + return "description: string expected"; + if (message.maxLength != null && message.hasOwnProperty("maxLength")) + if (!$util.isInteger(message.maxLength) && !(message.maxLength && $util.isInteger(message.maxLength.low) && $util.isInteger(message.maxLength.high))) + return "maxLength: integer|Long expected"; + if (message.precision != null && message.hasOwnProperty("precision")) + if (!$util.isInteger(message.precision) && !(message.precision && $util.isInteger(message.precision.low) && $util.isInteger(message.precision.high))) + return "precision: integer|Long expected"; + if (message.scale != null && message.hasOwnProperty("scale")) + if (!$util.isInteger(message.scale) && !(message.scale && 
$util.isInteger(message.scale.low) && $util.isInteger(message.scale.high))) + return "scale: integer|Long expected"; return null; }; /** - * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. + * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema */ - StreamStatus.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamStatus) + TableFieldSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.TableFieldSchema) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); - if (object.estimatedRowCount != null) - if ($util.Long) - (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; - else if (typeof object.estimatedRowCount === "string") - message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); - else if (typeof object.estimatedRowCount === "number") - message.estimatedRowCount = object.estimatedRowCount; - else if (typeof object.estimatedRowCount === "object") - message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); - if (object.fractionConsumed != null) - message.fractionConsumed = Number(object.fractionConsumed); - if (object.progress != null) { - if (typeof object.progress !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamStatus.progress: object expected"); - message.progress = 
$root.google.cloud.bigquery.storage.v1beta1.Progress.fromObject(object.progress); + var message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); + if (object.name != null) + message.name = String(object.name); + switch (object.type) { + case "TYPE_UNSPECIFIED": + case 0: + message.type = 0; + break; + case "STRING": + case 1: + message.type = 1; + break; + case "INT64": + case 2: + message.type = 2; + break; + case "DOUBLE": + case 3: + message.type = 3; + break; + case "STRUCT": + case 4: + message.type = 4; + break; + case "BYTES": + case 5: + message.type = 5; + break; + case "BOOL": + case 6: + message.type = 6; + break; + case "TIMESTAMP": + case 7: + message.type = 7; + break; + case "DATE": + case 8: + message.type = 8; + break; + case "TIME": + case 9: + message.type = 9; + break; + case "DATETIME": + case 10: + message.type = 10; + break; + case "GEOGRAPHY": + case 11: + message.type = 11; + break; + case "NUMERIC": + case 12: + message.type = 12; + break; + case "BIGNUMERIC": + case 13: + message.type = 13; + break; + case "INTERVAL": + case 14: + message.type = 14; + break; + case "JSON": + case 15: + message.type = 15; + break; } - if (object.isSplittable != null) - message.isSplittable = Boolean(object.isSplittable); + switch (object.mode) { + case "MODE_UNSPECIFIED": + case 0: + message.mode = 0; + break; + case "NULLABLE": + case 1: + message.mode = 1; + break; + case "REQUIRED": + case 2: + message.mode = 2; + break; + case "REPEATED": + case 3: + message.mode = 3; + break; + } + if (object.fields) { + if (!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: object expected"); + message.fields[i] = 
$root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); + } + } + if (object.description != null) + message.description = String(object.description); + if (object.maxLength != null) + if ($util.Long) + (message.maxLength = $util.Long.fromValue(object.maxLength)).unsigned = false; + else if (typeof object.maxLength === "string") + message.maxLength = parseInt(object.maxLength, 10); + else if (typeof object.maxLength === "number") + message.maxLength = object.maxLength; + else if (typeof object.maxLength === "object") + message.maxLength = new $util.LongBits(object.maxLength.low >>> 0, object.maxLength.high >>> 0).toNumber(); + if (object.precision != null) + if ($util.Long) + (message.precision = $util.Long.fromValue(object.precision)).unsigned = false; + else if (typeof object.precision === "string") + message.precision = parseInt(object.precision, 10); + else if (typeof object.precision === "number") + message.precision = object.precision; + else if (typeof object.precision === "object") + message.precision = new $util.LongBits(object.precision.low >>> 0, object.precision.high >>> 0).toNumber(); + if (object.scale != null) + if ($util.Long) + (message.scale = $util.Long.fromValue(object.scale)).unsigned = false; + else if (typeof object.scale === "string") + message.scale = parseInt(object.scale, 10); + else if (typeof object.scale === "number") + message.scale = object.scale; + else if (typeof object.scale === "object") + message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); return message; }; /** - * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. + * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.StreamStatus} message StreamStatus + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema} message TableFieldSchema * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - StreamStatus.toObject = function toObject(message, options) { + TableFieldSchema.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.fields = []; if (options.defaults) { + object.name = ""; + object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; + object.mode = options.enums === String ? "MODE_UNSPECIFIED" : 0; + object.description = ""; if ($util.Long) { var long = new $util.Long(0, 0, false); - object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + object.maxLength = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else - object.estimatedRowCount = options.longs === String ? "0" : 0; - object.fractionConsumed = 0; - object.isSplittable = false; - object.progress = null; + object.maxLength = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.precision = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.precision = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.scale = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.scale = options.longs === String ? 
"0" : 0; } - if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) - if (typeof message.estimatedRowCount === "number") - object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; + if (message.mode != null && message.hasOwnProperty("mode")) + object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] : message.mode; + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); + } + if (message.description != null && message.hasOwnProperty("description")) + object.description = message.description; + if (message.maxLength != null && message.hasOwnProperty("maxLength")) + if (typeof message.maxLength === "number") + object.maxLength = options.longs === String ? String(message.maxLength) : message.maxLength; else - object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; - if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) - object.fractionConsumed = options.json && !isFinite(message.fractionConsumed) ? 
String(message.fractionConsumed) : message.fractionConsumed; - if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) - object.isSplittable = message.isSplittable; - if (message.progress != null && message.hasOwnProperty("progress")) - object.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.toObject(message.progress, options); + object.maxLength = options.longs === String ? $util.Long.prototype.toString.call(message.maxLength) : options.longs === Number ? new $util.LongBits(message.maxLength.low >>> 0, message.maxLength.high >>> 0).toNumber() : message.maxLength; + if (message.precision != null && message.hasOwnProperty("precision")) + if (typeof message.precision === "number") + object.precision = options.longs === String ? String(message.precision) : message.precision; + else + object.precision = options.longs === String ? $util.Long.prototype.toString.call(message.precision) : options.longs === Number ? new $util.LongBits(message.precision.low >>> 0, message.precision.high >>> 0).toNumber() : message.precision; + if (message.scale != null && message.hasOwnProperty("scale")) + if (typeof message.scale === "number") + object.scale = options.longs === String ? String(message.scale) : message.scale; + else + object.scale = options.longs === String ? $util.Long.prototype.toString.call(message.scale) : options.longs === Number ? new $util.LongBits(message.scale.low >>> 0, message.scale.high >>> 0).toNumber() : message.scale; return object; }; /** - * Converts this StreamStatus to JSON. + * Converts this TableFieldSchema to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema * @instance * @returns {Object.} JSON object */ - StreamStatus.prototype.toJSON = function toJSON() { + TableFieldSchema.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return StreamStatus; + /** + * Type enum. + * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Type + * @enum {number} + * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value + * @property {number} STRING=1 STRING value + * @property {number} INT64=2 INT64 value + * @property {number} DOUBLE=3 DOUBLE value + * @property {number} STRUCT=4 STRUCT value + * @property {number} BYTES=5 BYTES value + * @property {number} BOOL=6 BOOL value + * @property {number} TIMESTAMP=7 TIMESTAMP value + * @property {number} DATE=8 DATE value + * @property {number} TIME=9 TIME value + * @property {number} DATETIME=10 DATETIME value + * @property {number} GEOGRAPHY=11 GEOGRAPHY value + * @property {number} NUMERIC=12 NUMERIC value + * @property {number} BIGNUMERIC=13 BIGNUMERIC value + * @property {number} INTERVAL=14 INTERVAL value + * @property {number} JSON=15 JSON value + */ + TableFieldSchema.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; + values[valuesById[1] = "STRING"] = 1; + values[valuesById[2] = "INT64"] = 2; + values[valuesById[3] = "DOUBLE"] = 3; + values[valuesById[4] = "STRUCT"] = 4; + values[valuesById[5] = "BYTES"] = 5; + values[valuesById[6] = "BOOL"] = 6; + values[valuesById[7] = "TIMESTAMP"] = 7; + values[valuesById[8] = "DATE"] = 8; + values[valuesById[9] = "TIME"] = 9; + values[valuesById[10] = "DATETIME"] = 10; + values[valuesById[11] = "GEOGRAPHY"] = 11; + values[valuesById[12] = "NUMERIC"] = 12; + values[valuesById[13] = "BIGNUMERIC"] = 13; + values[valuesById[14] = "INTERVAL"] = 14; + 
values[valuesById[15] = "JSON"] = 15; + return values; + })(); + + /** + * Mode enum. + * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Mode + * @enum {number} + * @property {number} MODE_UNSPECIFIED=0 MODE_UNSPECIFIED value + * @property {number} NULLABLE=1 NULLABLE value + * @property {number} REQUIRED=2 REQUIRED value + * @property {number} REPEATED=3 REPEATED value + */ + TableFieldSchema.Mode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "NULLABLE"] = 1; + values[valuesById[2] = "REQUIRED"] = 2; + values[valuesById[3] = "REPEATED"] = 3; + return values; + })(); + + return TableFieldSchema; })(); - v1beta1.Progress = (function() { + return v1; + })(); + + storage.v1beta1 = (function() { + + /** + * Namespace v1beta1. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1beta1 = {}; + + v1beta1.ArrowSchema = (function() { /** - * Properties of a Progress. + * Properties of an ArrowSchema. * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IProgress - * @property {number|null} [atResponseStart] Progress atResponseStart - * @property {number|null} [atResponseEnd] Progress atResponseEnd + * @interface IArrowSchema + * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema */ /** - * Constructs a new Progress. + * Constructs a new ArrowSchema. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a Progress. - * @implements IProgress + * @classdesc Represents an ArrowSchema. 
+ * @implements IArrowSchema * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set */ - function Progress(properties) { + function ArrowSchema(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -7364,88 +9051,75 @@ } /** - * Progress atResponseStart. - * @member {number} atResponseStart - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * ArrowSchema serializedSchema. + * @member {Uint8Array} serializedSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @instance */ - Progress.prototype.atResponseStart = 0; + ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); /** - * Progress atResponseEnd. - * @member {number} atResponseEnd - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @instance - */ - Progress.prototype.atResponseEnd = 0; - - /** - * Creates a new Progress instance using the specified properties. + * Creates a new ArrowSchema instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress instance + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema instance */ - Progress.create = function create(properties) { - return new Progress(properties); + ArrowSchema.create = function create(properties) { + return new ArrowSchema(properties); }; /** - * Encodes the specified Progress message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Progress.encode = function encode(message, writer) { + ArrowSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) - writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); - if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); + if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); return writer; }; /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Progress.encodeDelimited = function encodeDelimited(message, writer) { + ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a Progress message from the specified reader or buffer. + * Decodes an ArrowSchema message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Progress.decode = function decode(reader, length) { + ArrowSchema.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.atResponseStart = reader.float(); - break; - case 2: - message.atResponseEnd = reader.float(); + message.serializedSchema = reader.bytes(); break; default: reader.skipType(tag & 7); @@ -7456,116 +9130,117 @@ }; /** - * Decodes a Progress message from the specified reader or buffer, length delimited. + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Progress.decodeDelimited = function decodeDelimited(reader) { + ArrowSchema.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a Progress message. + * Verifies an ArrowSchema message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Progress.verify = function verify(message) { + ArrowSchema.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - if (typeof message.atResponseStart !== "number") - return "atResponseStart: number expected"; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - if (typeof message.atResponseEnd !== "number") - return "atResponseEnd: number expected"; + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) + return "serializedSchema: buffer expected"; return null; }; /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema */ - Progress.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Progress) + ArrowSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); - if (object.atResponseStart != null) - message.atResponseStart = Number(object.atResponseStart); - if (object.atResponseEnd != null) - message.atResponseEnd = Number(object.atResponseEnd); + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + if (object.serializedSchema != null) + if (typeof object.serializedSchema === "string") + $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); + else if (object.serializedSchema.length) + message.serializedSchema = object.serializedSchema; return message; }; /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.Progress} message Progress + * @param {google.cloud.bigquery.storage.v1beta1.ArrowSchema} message ArrowSchema * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - Progress.toObject = function toObject(message, options) { + ArrowSchema.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) { - object.atResponseStart = 0; - object.atResponseEnd = 0; - } - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + if (options.defaults) + if (options.bytes === String) + object.serializedSchema = ""; + else { + object.serializedSchema = []; + if (options.bytes !== Array) + object.serializedSchema = $util.newBuffer(object.serializedSchema); + } + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; return object; }; /** - * Converts this Progress to JSON. + * Converts this ArrowSchema to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema * @instance * @returns {Object.} JSON object */ - Progress.prototype.toJSON = function toJSON() { + ArrowSchema.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return Progress; + return ArrowSchema; })(); - v1beta1.ThrottleStatus = (function() { + v1beta1.ArrowRecordBatch = (function() { /** - * Properties of a ThrottleStatus. + * Properties of an ArrowRecordBatch. * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IThrottleStatus - * @property {number|null} [throttlePercent] ThrottleStatus throttlePercent + * @interface IArrowRecordBatch + * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch + * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount */ /** - * Constructs a new ThrottleStatus. + * Constructs a new ArrowRecordBatch. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ThrottleStatus. - * @implements IThrottleStatus + * @classdesc Represents an ArrowRecordBatch. + * @implements IArrowRecordBatch * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set */ - function ThrottleStatus(properties) { + function ArrowRecordBatch(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -7573,75 +9248,88 @@ } /** - * ThrottleStatus throttlePercent. - * @member {number} throttlePercent - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * ArrowRecordBatch serializedRecordBatch. 
+ * @member {Uint8Array} serializedRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @instance */ - ThrottleStatus.prototype.throttlePercent = 0; + ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); /** - * Creates a new ThrottleStatus instance using the specified properties. + * ArrowRecordBatch rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus instance + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch instance */ - ThrottleStatus.create = function create(properties) { - return new ThrottleStatus(properties); + ArrowRecordBatch.create = function create(properties) { + return new ArrowRecordBatch(properties); }; /** - * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ThrottleStatus.encode = function encode(message, writer) { + ArrowRecordBatch.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; /** - * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ThrottleStatus.encodeDelimited = function encodeDelimited(message, writer) { + ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ThrottleStatus message from the specified reader or buffer. + * Decodes an ArrowRecordBatch message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ThrottleStatus.decode = function decode(reader, length) { + ArrowRecordBatch.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.throttlePercent = reader.int32(); + message.serializedRecordBatch = reader.bytes(); + break; + case 2: + message.rowCount = reader.int64(); break; default: reader.skipType(tag & 7); @@ -7652,111 +9340,139 @@ }; /** - * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ThrottleStatus.decodeDelimited = function decodeDelimited(reader) { + ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ThrottleStatus message. + * Verifies an ArrowRecordBatch message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ThrottleStatus.verify = function verify(message) { + ArrowRecordBatch.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - if (!$util.isInteger(message.throttlePercent)) - return "throttlePercent: integer expected"; + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) + return "serializedRecordBatch: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; return null; }; /** - * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch */ - ThrottleStatus.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus) + ArrowRecordBatch.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); - if (object.throttlePercent != null) - message.throttlePercent = object.throttlePercent | 0; - return message; - }; - + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + if (object.serializedRecordBatch != null) + if (typeof object.serializedRecordBatch === "string") + $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); + else if (object.serializedRecordBatch.length) + message.serializedRecordBatch = object.serializedRecordBatch; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + /** - * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. 
+ * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @static - * @param {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} message ThrottleStatus + * @param {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} message ArrowRecordBatch * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ThrottleStatus.toObject = function toObject(message, options) { + ArrowRecordBatch.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) - object.throttlePercent = 0; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - object.throttlePercent = message.throttlePercent; + if (options.defaults) { + if (options.bytes === String) + object.serializedRecordBatch = ""; + else { + object.serializedRecordBatch = []; + if (options.bytes !== Array) + object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? 
String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; return object; }; /** - * Converts this ThrottleStatus to JSON. + * Converts this ArrowRecordBatch to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch * @instance * @returns {Object.} JSON object */ - ThrottleStatus.prototype.toJSON = function toJSON() { + ArrowRecordBatch.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ThrottleStatus; + return ArrowRecordBatch; })(); - v1beta1.ReadRowsResponse = (function() { + v1beta1.AvroSchema = (function() { /** - * Properties of a ReadRowsResponse. + * Properties of an AvroSchema. * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadRowsResponse - * @property {google.cloud.bigquery.storage.v1beta1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows - * @property {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch - * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount - * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status - * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus + * @interface IAvroSchema + * @property {string|null} [schema] AvroSchema schema */ /** - * Constructs a new ReadRowsResponse. + * Constructs a new AvroSchema. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadRowsResponse. - * @implements IReadRowsResponse + * @classdesc Represents an AvroSchema. 
+ * @implements IAvroSchema * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set */ - function ReadRowsResponse(properties) { + function AvroSchema(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -7764,141 +9480,75 @@ } /** - * ReadRowsResponse avroRows. - * @member {google.cloud.bigquery.storage.v1beta1.IAvroRows|null|undefined} avroRows - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.avroRows = null; - - /** - * ReadRowsResponse arrowRecordBatch. - * @member {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null|undefined} arrowRecordBatch - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.arrowRecordBatch = null; - - /** - * ReadRowsResponse rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * ReadRowsResponse status. - * @member {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null|undefined} status - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.status = null; - - /** - * ReadRowsResponse throttleStatus. - * @member {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null|undefined} throttleStatus - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.throttleStatus = null; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * ReadRowsResponse rows. 
- * @member {"avroRows"|"arrowRecordBatch"|undefined} rows - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * AvroSchema schema. + * @member {string} schema + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @instance */ - Object.defineProperty(ReadRowsResponse.prototype, "rows", { - get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), - set: $util.oneOfSetter($oneOfFields) - }); + AvroSchema.prototype.schema = ""; /** - * Creates a new ReadRowsResponse instance using the specified properties. + * Creates a new AvroSchema instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse instance + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema instance */ - ReadRowsResponse.create = function create(properties) { - return new ReadRowsResponse(properties); + AvroSchema.create = function create(properties) { + return new AvroSchema(properties); }; /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadRowsResponse.encode = function encode(message, writer) { + AvroSchema.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.status != null && Object.hasOwnProperty.call(message, "status")) - $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) - $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) - $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.throttleStatus != null && Object.hasOwnProperty.call(message, "throttleStatus")) - $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); return writer; }; /** - * Encodes the specified ReadRowsResponse message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. + * Decodes an AvroSchema message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsResponse.decode = function decode(reader, length) { + AvroSchema.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 3: - message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); - break; - case 4: - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); - break; - case 6: - message.rowCount = reader.int64(); - break; - case 2: - message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); - break; - case 5: - message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); + case 1: + message.schema = reader.string(); break; default: reader.skipType(tag & 7); @@ -7909,188 +9559,108 @@ }; /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + AvroSchema.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ReadRowsResponse message. + * Verifies an AvroSchema message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ReadRowsResponse.verify = function verify(message) { + AvroSchema.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - var properties = {}; - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.verify(message.avroRows); - if (error) - return "avroRows." + error; - } - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - if (properties.rows === 1) - return "rows: multiple values"; - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify(message.arrowRecordBatch); - if (error) - return "arrowRecordBatch." 
+ error; - } - } - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - if (message.status != null && message.hasOwnProperty("status")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.verify(message.status); - if (error) - return "status." + error; - } - if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify(message.throttleStatus); - if (error) - return "throttleStatus." + error; - } + if (message.schema != null && message.hasOwnProperty("schema")) + if (!$util.isString(message.schema)) + return "schema: string expected"; return null; }; /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema */ - ReadRowsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) + AvroSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroSchema) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); - if (object.avroRows != null) { - if (typeof object.avroRows !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroRows: object expected"); - message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.fromObject(object.avroRows); - } - if (object.arrowRecordBatch != null) { - if (typeof object.arrowRecordBatch !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowRecordBatch: object expected"); - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); - } - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - if (object.status != null) { - if (typeof object.status !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status: 
object expected"); - message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.fromObject(object.status); - } - if (object.throttleStatus != null) { - if (typeof object.throttleStatus !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); - message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); - } + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + if (object.schema != null) + message.schema = String(object.schema); return message; }; /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} message ReadRowsResponse + * @param {google.cloud.bigquery.storage.v1beta1.AvroSchema} message AvroSchema * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ReadRowsResponse.toObject = function toObject(message, options) { + AvroSchema.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.defaults) { - object.status = null; - object.throttleStatus = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? 
"0" : 0; - } - if (message.status != null && message.hasOwnProperty("status")) - object.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.toObject(message.status, options); - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - object.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.toObject(message.avroRows, options); - if (options.oneofs) - object.rows = "avroRows"; - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); - if (options.oneofs) - object.rows = "arrowRecordBatch"; - } - if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) - object.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.toObject(message.throttleStatus, options); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + if (options.defaults) + object.schema = ""; + if (message.schema != null && message.hasOwnProperty("schema")) + object.schema = message.schema; return object; }; /** - * Converts this ReadRowsResponse to JSON. + * Converts this AvroSchema to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema * @instance * @returns {Object.} JSON object */ - ReadRowsResponse.prototype.toJSON = function toJSON() { + AvroSchema.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ReadRowsResponse; + return AvroSchema; })(); - v1beta1.BatchCreateReadSessionStreamsRequest = (function() { + v1beta1.AvroRows = (function() { /** - * Properties of a BatchCreateReadSessionStreamsRequest. + * Properties of an AvroRows. * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IBatchCreateReadSessionStreamsRequest - * @property {google.cloud.bigquery.storage.v1beta1.IReadSession|null} [session] BatchCreateReadSessionStreamsRequest session - * @property {number|null} [requestedStreams] BatchCreateReadSessionStreamsRequest requestedStreams + * @interface IAvroRows + * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows + * @property {number|Long|null} [rowCount] AvroRows rowCount */ /** - * Constructs a new BatchCreateReadSessionStreamsRequest. + * Constructs a new AvroRows. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BatchCreateReadSessionStreamsRequest. - * @implements IBatchCreateReadSessionStreamsRequest + * @classdesc Represents an AvroRows. + * @implements IAvroRows * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set */ - function BatchCreateReadSessionStreamsRequest(properties) { + function AvroRows(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -8098,88 +9668,88 @@ } /** - * BatchCreateReadSessionStreamsRequest session. 
- * @member {google.cloud.bigquery.storage.v1beta1.IReadSession|null|undefined} session - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * AvroRows serializedBinaryRows. + * @member {Uint8Array} serializedBinaryRows + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @instance */ - BatchCreateReadSessionStreamsRequest.prototype.session = null; + AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); /** - * BatchCreateReadSessionStreamsRequest requestedStreams. - * @member {number} requestedStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * AvroRows rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @instance */ - BatchCreateReadSessionStreamsRequest.prototype.requestedStreams = 0; + AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * Creates a new AvroRows instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest instance + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows instance */ - BatchCreateReadSessionStreamsRequest.create = function create(properties) { - return new BatchCreateReadSessionStreamsRequest(properties); + AvroRows.create = function create(properties) { + return new AvroRows(properties); }; /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { + AvroRows.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.session != null && Object.hasOwnProperty.call(message, "session")) - $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); + if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); return writer; }; /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - BatchCreateReadSessionStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { + AvroRows.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. + * Decodes an AvroRows message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { + AvroRows.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); + message.serializedBinaryRows = reader.bytes(); break; case 2: - message.requestedStreams = reader.int32(); + message.rowCount = reader.int64(); break; default: reader.skipType(tag & 7); @@ -8190,122 +9760,141 @@ }; /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. + * Decodes an AvroRows message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsRequest.decodeDelimited = function decodeDelimited(reader) { + AvroRows.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a BatchCreateReadSessionStreamsRequest message. + * Verifies an AvroRows message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - BatchCreateReadSessionStreamsRequest.verify = function verify(message) { + AvroRows.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.session != null && message.hasOwnProperty("session")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.verify(message.session); - if (error) - return "session." + error; - } - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - if (!$util.isInteger(message.requestedStreams)) - return "requestedStreams: integer expected"; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) + return "serializedBinaryRows: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; return null; }; /** - * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows */ - BatchCreateReadSessionStreamsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) + AvroRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroRows) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); - if (object.session != null) { - if (typeof object.session !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session: object expected"); - message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.fromObject(object.session); - } - if (object.requestedStreams != null) - message.requestedStreams = object.requestedStreams | 0; + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + if (object.serializedBinaryRows != null) + if (typeof object.serializedBinaryRows === "string") + $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); + else if (object.serializedBinaryRows.length) + message.serializedBinaryRows = object.serializedBinaryRows; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = 
object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); return message; }; /** - * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @static - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest + * @param {google.cloud.bigquery.storage.v1beta1.AvroRows} message AvroRows * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - BatchCreateReadSessionStreamsRequest.toObject = function toObject(message, options) { + AvroRows.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - object.session = null; - object.requestedStreams = 0; + if (options.bytes === String) + object.serializedBinaryRows = ""; + else { + object.serializedBinaryRows = []; + if (options.bytes !== Array) + object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? 
"0" : 0; } - if (message.session != null && message.hasOwnProperty("session")) - object.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.toObject(message.session, options); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - object.requestedStreams = message.requestedStreams; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; return object; }; /** - * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * Converts this AvroRows to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows * @instance * @returns {Object.} JSON object */ - BatchCreateReadSessionStreamsRequest.prototype.toJSON = function toJSON() { + AvroRows.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return BatchCreateReadSessionStreamsRequest; + return AvroRows; })(); - v1beta1.BatchCreateReadSessionStreamsResponse = (function() { + v1beta1.TableReadOptions = (function() { /** - * Properties of a BatchCreateReadSessionStreamsResponse. + * Properties of a TableReadOptions. 
* @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IBatchCreateReadSessionStreamsResponse - * @property {Array.|null} [streams] BatchCreateReadSessionStreamsResponse streams + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction */ /** - * Constructs a new BatchCreateReadSessionStreamsResponse. + * Constructs a new TableReadOptions. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BatchCreateReadSessionStreamsResponse. - * @implements IBatchCreateReadSessionStreamsResponse + * @classdesc Represents a TableReadOptions. + * @implements ITableReadOptions * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set */ - function BatchCreateReadSessionStreamsResponse(properties) { - this.streams = []; + function TableReadOptions(properties) { + this.selectedFields = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -8313,78 +9902,91 @@ } /** - * BatchCreateReadSessionStreamsResponse streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * TableReadOptions selectedFields. + * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @instance */ - BatchCreateReadSessionStreamsResponse.prototype.streams = $util.emptyArray; + TableReadOptions.prototype.selectedFields = $util.emptyArray; /** - * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * TableReadOptions rowRestriction. 
+ * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; + + /** + * Creates a new TableReadOptions instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse instance + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions instance */ - BatchCreateReadSessionStreamsResponse.create = function create(properties) { - return new BatchCreateReadSessionStreamsResponse(properties); + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); }; /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - BatchCreateReadSessionStreamsResponse.encode = function encode(message, writer) { + TableReadOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); return writer; }; /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - BatchCreateReadSessionStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. + * Decodes a TableReadOptions message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { + TableReadOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + case 2: + message.rowRestriction = reader.string(); break; default: reader.skipType(tag & 7); @@ -8395,200 +9997,404 @@ }; /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsResponse.decodeDelimited = function decodeDelimited(reader) { + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a BatchCreateReadSessionStreamsResponse message. + * Verifies a TableReadOptions message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - BatchCreateReadSessionStreamsResponse.verify = function verify(message) { + TableReadOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); - if (error) - return "streams." 
+ error; - } + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; return null; }; /** - * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions */ - BatchCreateReadSessionStreamsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== 
"object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); - } + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); } + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); return message; }; /** - * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @static - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse + * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} message TableReadOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - BatchCreateReadSessionStreamsResponse.toObject = function toObject(message, options) { + TableReadOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.arrays || options.defaults) - object.streams = []; - if (message.streams && message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; return object; }; /** - * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * Converts this TableReadOptions to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions * @instance * @returns {Object.} JSON object */ - BatchCreateReadSessionStreamsResponse.prototype.toJSON = function toJSON() { + TableReadOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return BatchCreateReadSessionStreamsResponse; + return TableReadOptions; })(); - v1beta1.FinalizeStreamRequest = (function() { - - /** - * Properties of a FinalizeStreamRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IFinalizeStreamRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] FinalizeStreamRequest stream - */ + v1beta1.BigQueryStorage = (function() { /** - * Constructs a new FinalizeStreamRequest. + * Constructs a new BigQueryStorage service. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a FinalizeStreamRequest. - * @implements IFinalizeStreamRequest + * @classdesc Represents a BigQueryStorage + * @extends $protobuf.rpc.Service * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited */ - function FinalizeStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; + function BigQueryStorage(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); } - /** - * FinalizeStreamRequest stream. 
- * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @instance - */ - FinalizeStreamRequest.prototype.stream = null; + (BigQueryStorage.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryStorage; /** - * Creates a new FinalizeStreamRequest instance using the specified properties. + * Creates new BigQueryStorage service using the specified rpc implementation. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest instance + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryStorage} RPC service. Useful where requests and/or responses are streamed. */ - FinalizeStreamRequest.create = function create(properties) { - return new FinalizeStreamRequest(properties); + BigQueryStorage.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); }; /** - * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeStreamRequest.encode = function encode(message, writer) { + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef CreateReadSessionCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} [response] ReadSession + */ + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.createReadSession = function createReadSession(request, callback) { + return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadSession, request, callback); + }, "name", { value: "CreateReadSession" }); + + /** + * Calls CreateReadSession. 
+ * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef ReadRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} [response] ReadRowsResponse + */ + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.readRows = function readRows(request, callback) { + return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, request, callback); + }, "name", { value: "ReadRows" }); + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. 
+ * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef BatchCreateReadSessionStreamsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} [response] BatchCreateReadSessionStreamsResponse + */ + + /** + * Calls BatchCreateReadSessionStreams. + * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback} callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.batchCreateReadSessionStreams = function batchCreateReadSessionStreams(request, callback) { + return this.rpcCall(batchCreateReadSessionStreams, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, request, callback); + }, "name", { value: "BatchCreateReadSessionStreams" }); + + /** + * Calls BatchCreateReadSessionStreams. + * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. 
+ * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef FinalizeStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.protobuf.Empty} [response] Empty + */ + + /** + * Calls FinalizeStream. + * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback} callback Node-style callback called with the error, if any, and Empty + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.finalizeStream = function finalizeStream(request, callback) { + return this.rpcCall(finalizeStream, $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, $root.google.protobuf.Empty, request, callback); + }, "name", { value: "FinalizeStream" }); + + /** + * Calls FinalizeStream. + * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef SplitReadStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} [response] SplitReadStreamResponse + */ + + /** + * Calls SplitReadStream. 
+ * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.splitReadStream = function splitReadStream(request, callback) { + return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, request, callback); + }, "name", { value: "SplitReadStream" }); + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryStorage; + })(); + + v1beta1.Stream = (function() { + + /** + * Properties of a Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStream + * @property {string|null} [name] Stream name + */ + + /** + * Constructs a new Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Stream. + * @implements IStream + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + */ + function Stream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Stream name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @instance + */ + Stream.prototype.name = ""; + + /** + * Creates a new Stream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream instance + */ + Stream.create = function create(properties) { + return new Stream(properties); + }; + + /** + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Stream.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; /** - * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FinalizeStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + Stream.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer. + * Decodes a Stream message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FinalizeStreamRequest.decode = function decode(reader, length) { + Stream.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + case 1: + message.name = reader.string(); break; default: reader.skipType(tag & 7); @@ -8599,113 +10405,108 @@ }; /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * Decodes a Stream message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FinalizeStreamRequest.decodeDelimited = function decodeDelimited(reader) { + Stream.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a FinalizeStreamRequest message. + * Verifies a Stream message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - FinalizeStreamRequest.verify = function verify(message) { + Stream.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.stream != null && message.hasOwnProperty("stream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); - if (error) - return "stream." + error; - } + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; return null; }; /** - * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * Creates a Stream message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream */ - FinalizeStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) + Stream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Stream) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); - if (object.stream != null) { - if (typeof object.stream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream: object expected"); - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); - } + var message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + if (object.name != null) + message.name = String(object.name); return message; }; /** - * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. + * Creates a plain object from a Stream message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @static - * @param {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} message FinalizeStreamRequest + * @param {google.cloud.bigquery.storage.v1beta1.Stream} message Stream * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FinalizeStreamRequest.toObject = function toObject(message, options) { + Stream.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) - object.stream = null; - if (message.stream != null && message.hasOwnProperty("stream")) - object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; return object; }; /** - * Converts this FinalizeStreamRequest to JSON. + * Converts this Stream to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.Stream * @instance * @returns {Object.} JSON object */ - FinalizeStreamRequest.prototype.toJSON = function toJSON() { + Stream.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return FinalizeStreamRequest; + return Stream; })(); - v1beta1.SplitReadStreamRequest = (function() { + v1beta1.StreamPosition = (function() { /** - * Properties of a SplitReadStreamRequest. + * Properties of a StreamPosition. 
* @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ISplitReadStreamRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [originalStream] SplitReadStreamRequest originalStream - * @property {number|null} [fraction] SplitReadStreamRequest fraction + * @interface IStreamPosition + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] StreamPosition stream + * @property {number|Long|null} [offset] StreamPosition offset */ /** - * Constructs a new SplitReadStreamRequest. + * Constructs a new StreamPosition. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a SplitReadStreamRequest. - * @implements ISplitReadStreamRequest + * @classdesc Represents a StreamPosition. + * @implements IStreamPosition * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set */ - function SplitReadStreamRequest(properties) { + function StreamPosition(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -8713,88 +10514,88 @@ } /** - * SplitReadStreamRequest originalStream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} originalStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * StreamPosition stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @instance */ - SplitReadStreamRequest.prototype.originalStream = null; + StreamPosition.prototype.stream = null; /** - * SplitReadStreamRequest fraction. - * @member {number} fraction - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * StreamPosition offset. 
+ * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @instance */ - SplitReadStreamRequest.prototype.fraction = 0; + StreamPosition.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Creates a new SplitReadStreamRequest instance using the specified properties. + * Creates a new StreamPosition instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest instance + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition instance */ - SplitReadStreamRequest.create = function create(properties) { - return new SplitReadStreamRequest(properties); + StreamPosition.create = function create(properties) { + return new StreamPosition(properties); }; /** - * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SplitReadStreamRequest.encode = function encode(message, writer) { + StreamPosition.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.originalStream != null && Object.hasOwnProperty.call(message, "originalStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); return writer; }; /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + StreamPosition.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * Decodes a StreamPosition message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamRequest.decode = function decode(reader, length) { + StreamPosition.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); break; case 2: - message.fraction = reader.float(); + message.offset = reader.int64(); break; default: reader.skipType(tag & 7); @@ -8805,122 +10606,143 @@ }; /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + StreamPosition.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a SplitReadStreamRequest message. + * Verifies a StreamPosition message. 
* @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - SplitReadStreamRequest.verify = function verify(message) { + StreamPosition.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.originalStream != null && message.hasOwnProperty("originalStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.originalStream); + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); if (error) - return "originalStream." + error; + return "stream." + error; } - if (message.fraction != null && message.hasOwnProperty("fraction")) - if (typeof message.fraction !== "number") - return "fraction: number expected"; + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; return null; }; /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition */ - SplitReadStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) + StreamPosition.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamPosition) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); - if (object.originalStream != null) { - if (typeof object.originalStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.originalStream: object expected"); - message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.originalStream); + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamPosition.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); } - if (object.fraction != null) - message.fraction = Number(object.fraction); + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); 
return message; }; /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @static - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} message StreamPosition * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - SplitReadStreamRequest.toObject = function toObject(message, options) { + StreamPosition.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - object.originalStream = null; - object.fraction = 0; + object.stream = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; } - if (message.originalStream != null && message.hasOwnProperty("originalStream")) - object.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.originalStream, options); - if (message.fraction != null && message.hasOwnProperty("fraction")) - object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? 
String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; return object; }; /** - * Converts this SplitReadStreamRequest to JSON. + * Converts this StreamPosition to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition * @instance * @returns {Object.} JSON object */ - SplitReadStreamRequest.prototype.toJSON = function toJSON() { + StreamPosition.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return SplitReadStreamRequest; + return StreamPosition; })(); - v1beta1.SplitReadStreamResponse = (function() { + v1beta1.ReadSession = (function() { /** - * Properties of a SplitReadStreamResponse. + * Properties of a ReadSession. 
* @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ISplitReadStreamResponse - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [primaryStream] SplitReadStreamResponse primaryStream - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [remainderStream] SplitReadStreamResponse remainderStream + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {Array.|null} [streams] ReadSession streams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] ReadSession tableReference + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] ReadSession shardingStrategy */ /** - * Constructs a new SplitReadStreamResponse. + * Constructs a new ReadSession. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a SplitReadStreamResponse. - * @implements ISplitReadStreamResponse + * @classdesc Represents a ReadSession. + * @implements IReadSession * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set */ - function SplitReadStreamResponse(properties) { + function ReadSession(properties) { + this.streams = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -8928,88 +10750,183 @@ } /** - * SplitReadStreamResponse primaryStream. 
- * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} primaryStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @instance */ - SplitReadStreamResponse.prototype.primaryStream = null; + ReadSession.prototype.name = ""; /** - * SplitReadStreamResponse remainderStream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} remainderStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @instance */ - SplitReadStreamResponse.prototype.remainderStream = null; + ReadSession.prototype.expireTime = null; /** - * Creates a new SplitReadStreamResponse instance using the specified properties. + * ReadSession avroSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + /** + * ReadSession tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableReference = null; + + /** + * ReadSession tableModifiers. 
+ * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.shardingStrategy = 0; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession instance */ - SplitReadStreamResponse.create = function create(properties) { - return new SplitReadStreamResponse(properties); + ReadSession.create = function create(properties) { + return new ReadSession(properties); }; /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * Encodes the specified ReadSession message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SplitReadStreamResponse.encode = function encode(message, writer) { + ReadSession.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + 
$root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); return writer; }; /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * Decodes a ReadSession message from the specified reader or buffer. * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamResponse.decode = function decode(reader, length) { + ReadSession.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + message.name = reader.string(); break; case 2: - message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 5: + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); + break; + case 6: + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); + break; + case 4: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + case 7: + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + case 8: + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + case 9: + message.shardingStrategy = reader.int32(); break; default: reader.skipType(tag & 7); @@ -9020,128 +10937,244 @@ }; /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + ReadSession.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a SplitReadStreamResponse message. + * Verifies a ReadSession message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - SplitReadStreamResponse.verify = function verify(message) { + ReadSession.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.primaryStream); + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); if (error) - return "primaryStream." + error; + return "expireTime." 
+ error; } - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.remainderStream); + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); if (error) - return "remainderStream." + error; + return "tableReference." + error; + } + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; /** - * Creates a SplitReadStreamResponse message from a plain object. 
Also converts values to their respective internal types. + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession */ - SplitReadStreamResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadSession) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); - if (object.primaryStream != null) { - if (typeof object.primaryStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primaryStream: object expected"); - message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.primaryStream); - } - if (object.remainderStream != null) { - if (typeof object.remainderStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainderStream: object expected"); - message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.remainderStream); - } + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.expireTime: object expected"); + message.expireTime = 
$root.google.protobuf.Timestamp.fromObject(object.expireTime); + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + switch (object.shardingStrategy) { + case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case 
"BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } return message; }; /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @static - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} message ReadSession * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - SplitReadStreamResponse.toObject = function toObject(message, options) { + ReadSession.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.streams = []; if (options.defaults) { - object.primaryStream = null; - object.remainderStream = null; + object.name = ""; + object.expireTime = null; + object.tableReference = null; + object.tableModifiers = null; + object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; } - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) - object.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.primaryStream, options); - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) - object.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.remainderStream, options); + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums 
=== String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; return object; }; /** - * Converts this SplitReadStreamResponse to JSON. + * Converts this ReadSession to JSON. * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession * @instance * @returns {Object.} JSON object */ - SplitReadStreamResponse.prototype.toJSON = function toJSON() { + ReadSession.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return SplitReadStreamResponse; + return ReadSession; })(); - v1beta1.TableReference = (function() { + v1beta1.CreateReadSessionRequest = (function() { /** - * Properties of a TableReference. + * Properties of a CreateReadSessionRequest. * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableReference - * @property {string|null} [projectId] TableReference projectId - * @property {string|null} [datasetId] TableReference datasetId - * @property {string|null} [tableId] TableReference tableId + * @interface ICreateReadSessionRequest + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] CreateReadSessionRequest tableReference + * @property {string|null} [parent] CreateReadSessionRequest parent + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] CreateReadSessionRequest tableModifiers + * @property {number|null} [requestedStreams] CreateReadSessionRequest requestedStreams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null} [readOptions] CreateReadSessionRequest readOptions + * @property {google.cloud.bigquery.storage.v1beta1.DataFormat|null} [format] CreateReadSessionRequest format + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] CreateReadSessionRequest shardingStrategy */ /** - * 
Constructs a new TableReference. + * Constructs a new CreateReadSessionRequest. * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableReference. - * @implements ITableReference + * @classdesc Represents a CreateReadSessionRequest. + * @implements ICreateReadSessionRequest * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set */ - function TableReference(properties) { + function CreateReadSessionRequest(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -9149,103 +11182,155 @@ } /** - * TableReference projectId. - * @member {string} projectId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * CreateReadSessionRequest tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @instance */ - TableReference.prototype.projectId = ""; + CreateReadSessionRequest.prototype.tableReference = null; /** - * TableReference datasetId. - * @member {string} datasetId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * CreateReadSessionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @instance */ - TableReference.prototype.datasetId = ""; + CreateReadSessionRequest.prototype.parent = ""; /** - * TableReference tableId. - * @member {string} tableId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * CreateReadSessionRequest tableModifiers. 
+ * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @instance */ - TableReference.prototype.tableId = ""; + CreateReadSessionRequest.prototype.tableModifiers = null; /** - * Creates a new TableReference instance using the specified properties. + * CreateReadSessionRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.requestedStreams = 0; + + /** + * CreateReadSessionRequest readOptions. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.readOptions = null; + + /** + * CreateReadSessionRequest format. + * @member {google.cloud.bigquery.storage.v1beta1.DataFormat} format + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.format = 0; + + /** + * CreateReadSessionRequest shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.shardingStrategy = 0; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. 
* @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest instance */ - TableReference.create = function create(properties) { - return new TableReference(properties); + CreateReadSessionRequest.create = function create(properties) { + return new CreateReadSessionRequest(properties); }; /** - * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
* @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReference.encode = function encode(message, writer) { + CreateReadSessionRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.projectId != null && Object.hasOwnProperty.call(message, "projectId")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); - if (message.datasetId != null && Object.hasOwnProperty.call(message, "datasetId")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.datasetId); - if (message.tableId != null && Object.hasOwnProperty.call(message, "tableId")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); + if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) + 
$root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.format != null && Object.hasOwnProperty.call(message, "format")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); return writer; }; /** - * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - TableReference.encodeDelimited = function encodeDelimited(message, writer) { + CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a TableReference message from the specified reader or buffer. + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. 
* @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReference.decode = function decode(reader, length) { + CreateReadSessionRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.projectId = reader.string(); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + case 6: + message.parent = reader.string(); break; case 2: - message.datasetId = reader.string(); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); break; case 3: - message.tableId = reader.string(); + message.requestedStreams = reader.int32(); break; - default: + case 4: + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); + break; + case 5: + message.format = reader.int32(); + break; + case 7: + message.shardingStrategy = reader.int32(); + break; + default: reader.skipType(tag & 7); break; } @@ -9254,578 +11339,6290 @@ }; /** - * Decodes a TableReference message from the specified reader or buffer, length delimited. + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReference.decodeDelimited = function decodeDelimited(reader) { + CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a TableReference message. + * Verifies a CreateReadSessionRequest message. * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - TableReference.verify = function verify(message) { + CreateReadSessionRequest.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.projectId != null && message.hasOwnProperty("projectId")) - if (!$util.isString(message.projectId)) - return "projectId: string expected"; - if (message.datasetId != null && message.hasOwnProperty("datasetId")) - if (!$util.isString(message.datasetId)) - return "datasetId: string expected"; - if (message.tableId != null && message.hasOwnProperty("tableId")) - if (!$util.isString(message.tableId)) - return "tableId: string expected"; + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = 
$root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (error) + return "tableReference." + error; + } + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.format != null && message.hasOwnProperty("format")) + switch (message.format) { + default: + return "format: enum value expected"; + case 0: + case 1: + case 3: + break; + } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; /** - * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest */ - TableReference.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReference) + CreateReadSessionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); - if (object.projectId != null) - message.projectId = String(object.projectId); - if (object.datasetId != null) - message.datasetId = String(object.datasetId); - if (object.tableId != null) - message.tableId = String(object.tableId); + var message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.parent != null) + message.parent = String(object.parent); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + if (object.readOptions != null) 
{ + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); + } + switch (object.format) { + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.format = 0; + break; + case "AVRO": + case 1: + message.format = 1; + break; + case "ARROW": + case 3: + message.format = 3; + break; + } + switch (object.shardingStrategy) { + case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case "BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } return message; }; /** - * Creates a plain object from a TableReference message. Also converts values to other types if specified. + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableReference} message TableReference + * @param {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} message CreateReadSessionRequest * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - TableReference.toObject = function toObject(message, options) { + CreateReadSessionRequest.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - object.projectId = ""; - object.datasetId = ""; - object.tableId = ""; + object.tableReference = null; + object.tableModifiers = null; + object.requestedStreams = 0; + object.readOptions = null; + object.format = options.enums === String ? 
"DATA_FORMAT_UNSPECIFIED" : 0; + object.parent = ""; + object.shardingStrategy = options.enums === String ? "SHARDING_STRATEGY_UNSPECIFIED" : 0; } - if (message.projectId != null && message.hasOwnProperty("projectId")) - object.projectId = message.projectId; - if (message.datasetId != null && message.hasOwnProperty("datasetId")) - object.datasetId = message.datasetId; - if (message.tableId != null && message.hasOwnProperty("tableId")) - object.tableId = message.tableId; + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); + if (message.format != null && message.hasOwnProperty("format")) + object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; return object; }; /** - * Converts this TableReference to JSON. + * Converts this CreateReadSessionRequest to JSON. 
* @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest * @instance * @returns {Object.} JSON object */ - TableReference.prototype.toJSON = function toJSON() { + CreateReadSessionRequest.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return TableReference; - })(); + return CreateReadSessionRequest; + })(); + + /** + * DataFormat enum. + * @name google.cloud.bigquery.storage.v1beta1.DataFormat + * @enum {number} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=3 ARROW value + */ + v1beta1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[3] = "ARROW"] = 3; + return values; + })(); + + /** + * ShardingStrategy enum. + * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy + * @enum {number} + * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value + * @property {number} LIQUID=1 LIQUID value + * @property {number} BALANCED=2 BALANCED value + */ + v1beta1.ShardingStrategy = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; + values[valuesById[1] = "LIQUID"] = 1; + values[valuesById[2] = "BALANCED"] = 2; + return values; + })(); + + v1beta1.ReadRowsRequest = (function() { + + /** + * Properties of a ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null} [readPosition] ReadRowsRequest readPosition + */ + + /** + * Constructs a new ReadRowsRequest. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsRequest. + * @implements IReadRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + */ + function ReadRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsRequest readPosition. + * @member {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null|undefined} readPosition + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.readPosition = null; + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest instance + */ + ReadRowsRequest.create = function create(properties) { + return new ReadRowsRequest(properties); + }; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.readPosition != null && Object.hasOwnProperty.call(message, "readPosition")) + $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.verify(message.readPosition); + if (error) + return "readPosition." + error; + } + return null; + }; + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + */ + ReadRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + if (object.readPosition != null) { + if (typeof object.readPosition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.readPosition: object expected"); + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.fromObject(object.readPosition); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} message ReadRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.readPosition = null; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) + object.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.toObject(message.readPosition, options); + return object; + }; + + /** + * Converts this ReadRowsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + * @returns {Object.} JSON object + */ + ReadRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsRequest; + })(); + + v1beta1.StreamStatus = (function() { + + /** + * Properties of a StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStreamStatus + * @property {number|Long|null} [estimatedRowCount] StreamStatus estimatedRowCount + * @property {number|null} [fractionConsumed] StreamStatus fractionConsumed + * @property {google.cloud.bigquery.storage.v1beta1.IProgress|null} [progress] StreamStatus progress + * @property {boolean|null} [isSplittable] StreamStatus isSplittable + */ + + /** + * Constructs a new StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a StreamStatus. 
+ * @implements IStreamStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + */ + function StreamStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamStatus estimatedRowCount. + * @member {number|Long} estimatedRowCount + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * StreamStatus fractionConsumed. + * @member {number} fractionConsumed + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.fractionConsumed = 0; + + /** + * StreamStatus progress. + * @member {google.cloud.bigquery.storage.v1beta1.IProgress|null|undefined} progress + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.progress = null; + + /** + * StreamStatus isSplittable. + * @member {boolean} isSplittable + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.isSplittable = false; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus instance + */ + StreamStatus.create = function create(properties) { + return new StreamStatus(properties); + }; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); + if (message.fractionConsumed != null && Object.hasOwnProperty.call(message, "fractionConsumed")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); + if (message.isSplittable != null && Object.hasOwnProperty.call(message, "isSplittable")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); + if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) + $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.estimatedRowCount = reader.int64(); + break; + case 2: + message.fractionConsumed = reader.float(); + break; + case 4: + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); + break; + case 3: + message.isSplittable = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamStatus message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) + return "estimatedRowCount: integer|Long expected"; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + if (typeof message.fractionConsumed !== "number") + return "fractionConsumed: number expected"; + if (message.progress != null && message.hasOwnProperty("progress")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Progress.verify(message.progress); + if (error) + return "progress." + error; + } + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + if (typeof message.isSplittable !== "boolean") + return "isSplittable: boolean expected"; + return null; + }; + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + */ + StreamStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + if (object.estimatedRowCount != null) + if ($util.Long) + (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; + else if (typeof object.estimatedRowCount === "string") + message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); + else if (typeof object.estimatedRowCount === "number") + message.estimatedRowCount = object.estimatedRowCount; + else if (typeof object.estimatedRowCount === "object") + message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); + if (object.fractionConsumed != null) + message.fractionConsumed = Number(object.fractionConsumed); + if (object.progress != null) { + if (typeof object.progress !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamStatus.progress: object expected"); + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.fromObject(object.progress); + } + if (object.isSplittable != null) + message.isSplittable = Boolean(object.isSplittable); + return message; + }; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.StreamStatus} message StreamStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedRowCount = options.longs === String ? "0" : 0; + object.fractionConsumed = 0; + object.isSplittable = false; + object.progress = null; + } + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (typeof message.estimatedRowCount === "number") + object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; + else + object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + object.fractionConsumed = options.json && !isFinite(message.fractionConsumed) ? String(message.fractionConsumed) : message.fractionConsumed; + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + object.isSplittable = message.isSplittable; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.toObject(message.progress, options); + return object; + }; + + /** + * Converts this StreamStatus to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + * @returns {Object.} JSON object + */ + StreamStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return StreamStatus; + })(); + + v1beta1.Progress = (function() { + + /** + * Properties of a Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IProgress + * @property {number|null} [atResponseStart] Progress atResponseStart + * @property {number|null} [atResponseEnd] Progress atResponseEnd + */ + + /** + * Constructs a new Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Progress. + * @implements IProgress + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + */ + function Progress(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Progress atResponseStart. + * @member {number} atResponseStart + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseStart = 0; + + /** + * Progress atResponseEnd. + * @member {number} atResponseEnd + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseEnd = 0; + + /** + * Creates a new Progress instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress instance + */ + Progress.create = function create(properties) { + return new Progress(properties); + }; + + /** + * Encodes the specified Progress message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) + writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); + if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); + return writer; + }; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Progress message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.atResponseStart = reader.float(); + break; + case 2: + message.atResponseEnd = reader.float(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Progress message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Progress.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (typeof message.atResponseStart !== "number") + return "atResponseStart: number expected"; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (typeof message.atResponseEnd !== "number") + return "atResponseEnd: number expected"; + return null; + }; + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + */ + Progress.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Progress) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + if (object.atResponseStart != null) + message.atResponseStart = Number(object.atResponseStart); + if (object.atResponseEnd != null) + message.atResponseEnd = Number(object.atResponseEnd); + return message; + }; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.Progress} message Progress + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Progress.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.atResponseStart = 0; + object.atResponseEnd = 0; + } + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + return object; + }; + + /** + * Converts this Progress to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + * @returns {Object.} JSON object + */ + Progress.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Progress; + })(); + + v1beta1.ThrottleStatus = (function() { + + /** + * Properties of a ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IThrottleStatus + * @property {number|null} [throttlePercent] ThrottleStatus throttlePercent + */ + + /** + * Constructs a new ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ThrottleStatus. 
+ * @implements IThrottleStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + */ + function ThrottleStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ThrottleStatus throttlePercent. + * @member {number} throttlePercent + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + */ + ThrottleStatus.prototype.throttlePercent = 0; + + /** + * Creates a new ThrottleStatus instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus instance + */ + ThrottleStatus.create = function create(properties) { + return new ThrottleStatus(properties); + }; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + return writer; + }; + + /** + * Encodes the specified ThrottleStatus message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.throttlePercent = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ThrottleStatus message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ThrottleStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (!$util.isInteger(message.throttlePercent)) + return "throttlePercent: integer expected"; + return null; + }; + + /** + * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + */ + ThrottleStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + if (object.throttlePercent != null) + message.throttlePercent = object.throttlePercent | 0; + return message; + }; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} message ThrottleStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ThrottleStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.throttlePercent = 0; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + object.throttlePercent = message.throttlePercent; + return object; + }; + + /** + * Converts this ThrottleStatus to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + * @returns {Object.} JSON object + */ + ThrottleStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ThrottleStatus; + })(); + + v1beta1.ReadRowsResponse = (function() { + + /** + * Properties of a ReadRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsResponse + * @property {google.cloud.bigquery.storage.v1beta1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows + * @property {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch + * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount + * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status + * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus + */ + + /** + * Constructs a new ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsResponse. + * @implements IReadRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + */ + function ReadRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsResponse avroRows. + * @member {google.cloud.bigquery.storage.v1beta1.IAvroRows|null|undefined} avroRows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroRows = null; + + /** + * ReadRowsResponse arrowRecordBatch. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null|undefined} arrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowRecordBatch = null; + + /** + * ReadRowsResponse rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadRowsResponse status. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null|undefined} status + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.status = null; + + /** + * ReadRowsResponse throttleStatus. + * @member {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null|undefined} throttleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.throttleStatus = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadRowsResponse rows. + * @member {"avroRows"|"arrowRecordBatch"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse instance + */ + ReadRowsResponse.create = function create(properties) { + return new ReadRowsResponse(properties); + }; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.status != null && Object.hasOwnProperty.call(message, "status")) + $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) + $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.throttleStatus != null && Object.hasOwnProperty.call(message, "throttleStatus")) + $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); + break; + case 4: + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + case 6: + message.rowCount = reader.int64(); + break; + case 2: + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); + break; + case 5: + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.verify(message.avroRows); + if (error) + return "avroRows." + error; + } + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify(message.arrowRecordBatch); + if (error) + return "arrowRecordBatch." + error; + } + } + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + if (message.status != null && message.hasOwnProperty("status")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.verify(message.status); + if (error) + return "status." + error; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify(message.throttleStatus); + if (error) + return "throttleStatus." + error; + } + return null; + }; + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + */ + ReadRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + if (object.avroRows != null) { + if (typeof object.avroRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroRows: object expected"); + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.fromObject(object.avroRows); + } + if (object.arrowRecordBatch != null) { + if (typeof object.arrowRecordBatch !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowRecordBatch: object expected"); + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); + } + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + if (object.status != null) { + if (typeof object.status !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status: object expected"); + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.fromObject(object.status); + } + if (object.throttleStatus != null) { + if (typeof object.throttleStatus !== "object") + throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} message ReadRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.status = null; + object.throttleStatus = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? 
"0" : 0; + } + if (message.status != null && message.hasOwnProperty("status")) + object.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.toObject(message.status, options); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + object.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.toObject(message.avroRows, options); + if (options.oneofs) + object.rows = "avroRows"; + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); + if (options.oneofs) + object.rows = "arrowRecordBatch"; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) + object.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.toObject(message.throttleStatus, options); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ReadRowsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + * @returns {Object.} JSON object + */ + ReadRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReadRowsResponse; + })(); + + v1beta1.BatchCreateReadSessionStreamsRequest = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsRequest. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IReadSession|null} [session] BatchCreateReadSessionStreamsRequest session + * @property {number|null} [requestedStreams] BatchCreateReadSessionStreamsRequest requestedStreams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsRequest. + * @implements IBatchCreateReadSessionStreamsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsRequest session. + * @member {google.cloud.bigquery.storage.v1beta1.IReadSession|null|undefined} session + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.session = null; + + /** + * BatchCreateReadSessionStreamsRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.requestedStreams = 0; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest instance + */ + BatchCreateReadSessionStreamsRequest.create = function create(properties) { + return new BatchCreateReadSessionStreamsRequest(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.session != null && Object.hasOwnProperty.call(message, "session")) + $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); + break; + case 2: + message.requestedStreams = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.session != null && message.hasOwnProperty("session")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.verify(message.session); + if (error) + return "session." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + */ + BatchCreateReadSessionStreamsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + if (object.session != null) { + if (typeof object.session !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session: object expected"); + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.fromObject(object.session); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.session = null; + object.requestedStreams = 0; + } + if (message.session != null && message.hasOwnProperty("session")) + object.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.toObject(message.session, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return BatchCreateReadSessionStreamsRequest; + })(); + + v1beta1.BatchCreateReadSessionStreamsResponse = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsResponse + * @property {Array.|null} [streams] BatchCreateReadSessionStreamsResponse streams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsResponse. 
+ * @implements IBatchCreateReadSessionStreamsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsResponse(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsResponse streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + */ + BatchCreateReadSessionStreamsResponse.prototype.streams = $util.emptyArray; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse instance + */ + BatchCreateReadSessionStreamsResponse.create = function create(properties) { + return new BatchCreateReadSessionStreamsResponse(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + */ + BatchCreateReadSessionStreamsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return BatchCreateReadSessionStreamsResponse; + })(); + + v1beta1.FinalizeStreamRequest = (function() { + + /** + * Properties of a FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IFinalizeStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] FinalizeStreamRequest stream + */ + + /** + * Constructs a new FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a FinalizeStreamRequest. 
+ * @implements IFinalizeStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + */ + function FinalizeStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FinalizeStreamRequest stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + */ + FinalizeStreamRequest.prototype.stream = null; + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest instance + */ + FinalizeStreamRequest.create = function create(properties) { + return new FinalizeStreamRequest(properties); + }; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FinalizeStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FinalizeStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + if (error) + return "stream." + error; + } + return null; + }; + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + */ + FinalizeStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + } + return message; + }; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} message FinalizeStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FinalizeStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.stream = null; + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + return object; + }; + + /** + * Converts this FinalizeStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + * @returns {Object.} JSON object + */ + FinalizeStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FinalizeStreamRequest; + })(); + + v1beta1.SplitReadStreamRequest = (function() { + + /** + * Properties of a SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [originalStream] SplitReadStreamRequest originalStream + * @property {number|null} [fraction] SplitReadStreamRequest fraction + */ + + /** + * Constructs a new SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamRequest. 
+ * @implements ISplitReadStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + */ + function SplitReadStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamRequest originalStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} originalStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.originalStream = null; + + /** + * SplitReadStreamRequest fraction. + * @member {number} fraction + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.fraction = 0; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest instance + */ + SplitReadStreamRequest.create = function create(properties) { + return new SplitReadStreamRequest(properties); + }; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.originalStream != null && Object.hasOwnProperty.call(message, "originalStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + case 2: + message.fraction = reader.float(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.originalStream != null && message.hasOwnProperty("originalStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.originalStream); + if (error) + return "originalStream." + error; + } + if (message.fraction != null && message.hasOwnProperty("fraction")) + if (typeof message.fraction !== "number") + return "fraction: number expected"; + return null; + }; + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + */ + SplitReadStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + if (object.originalStream != null) { + if (typeof object.originalStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.originalStream: object expected"); + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.originalStream); + } + if (object.fraction != null) + message.fraction = Number(object.fraction); + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.originalStream = null; + object.fraction = 0; + } + if (message.originalStream != null && message.hasOwnProperty("originalStream")) + object.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.originalStream, options); + if (message.fraction != null && message.hasOwnProperty("fraction")) + object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + return object; + }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamRequest; + })(); + + v1beta1.SplitReadStreamResponse = (function() { + + /** + * Properties of a SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamResponse + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [primaryStream] SplitReadStreamResponse primaryStream + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [remainderStream] SplitReadStreamResponse remainderStream + */ + + /** + * Constructs a new SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamResponse. 
+ * @implements ISplitReadStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + */ + function SplitReadStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamResponse primaryStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} primaryStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.primaryStream = null; + + /** + * SplitReadStreamResponse remainderStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} remainderStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.remainderStream = null; + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse instance + */ + SplitReadStreamResponse.create = function create(properties) { + return new SplitReadStreamResponse(properties); + }; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + case 2: + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.primaryStream); + if (error) + return "primaryStream." + error; + } + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.remainderStream); + if (error) + return "remainderStream." + error; + } + return null; + }; + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + */ + SplitReadStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + if (object.primaryStream != null) { + if (typeof object.primaryStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primaryStream: object expected"); + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.primaryStream); + } + if (object.remainderStream != null) { + if (typeof object.remainderStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainderStream: object expected"); + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.remainderStream); + } + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.primaryStream = null; + object.remainderStream = null; + } + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + object.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.primaryStream, options); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + object.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.remainderStream, options); + return object; + }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return SplitReadStreamResponse; + })(); + + v1beta1.TableReference = (function() { + + /** + * Properties of a TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableReference + * @property {string|null} [projectId] TableReference projectId + * @property {string|null} [datasetId] TableReference datasetId + * @property {string|null} [tableId] TableReference tableId + */ + + /** + * Constructs a new TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableReference. 
+ * @implements ITableReference + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + */ + function TableReference(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReference projectId. + * @member {string} projectId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.projectId = ""; + + /** + * TableReference datasetId. + * @member {string} datasetId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.datasetId = ""; + + /** + * TableReference tableId. + * @member {string} tableId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.tableId = ""; + + /** + * Creates a new TableReference instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference instance + */ + TableReference.create = function create(properties) { + return new TableReference(properties); + }; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.projectId != null && Object.hasOwnProperty.call(message, "projectId")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); + if (message.datasetId != null && Object.hasOwnProperty.call(message, "datasetId")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.datasetId); + if (message.tableId != null && Object.hasOwnProperty.call(message, "tableId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); + return writer; + }; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReference message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.projectId = reader.string(); + break; + case 2: + message.datasetId = reader.string(); + break; + case 3: + message.tableId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReference message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReference.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.projectId != null && message.hasOwnProperty("projectId")) + if (!$util.isString(message.projectId)) + return "projectId: string expected"; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + if (!$util.isString(message.datasetId)) + return "datasetId: string expected"; + if (message.tableId != null && message.hasOwnProperty("tableId")) + if (!$util.isString(message.tableId)) + return "tableId: string expected"; + return null; + }; + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + */ + TableReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReference) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + if (object.projectId != null) + message.projectId = String(object.projectId); + if (object.datasetId != null) + message.datasetId = String(object.datasetId); + if (object.tableId != null) + message.tableId = String(object.tableId); + return message; + }; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableReference} message TableReference + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReference.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.projectId = ""; + object.datasetId = ""; + object.tableId = ""; + } + if (message.projectId != null && message.hasOwnProperty("projectId")) + object.projectId = message.projectId; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + object.datasetId = message.datasetId; + if (message.tableId != null && message.hasOwnProperty("tableId")) + object.tableId = message.tableId; + return object; + }; + + /** + * Converts this TableReference to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + * @returns {Object.} JSON object + */ + TableReference.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableReference; + })(); + + v1beta1.TableModifiers = (function() { + + /** + * Properties of a TableModifiers. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ + + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableModifiers. 
+ * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableModifiers snapshotTime. + * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; + + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableModifiers message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableModifiers message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; + + /** + * Converts this TableModifiers to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return TableModifiers; + })(); + + return v1beta1; + })(); + + return storage; + })(); + + return bigquery; + })(); + + return cloud; + })(); + + google.protobuf = (function() { + + /** + * Namespace protobuf. + * @memberof google + * @namespace + */ + var protobuf = {}; + + protobuf.FileDescriptorSet = (function() { + + /** + * Properties of a FileDescriptorSet. + * @memberof google.protobuf + * @interface IFileDescriptorSet + * @property {Array.|null} [file] FileDescriptorSet file + */ + + /** + * Constructs a new FileDescriptorSet. + * @memberof google.protobuf + * @classdesc Represents a FileDescriptorSet. + * @implements IFileDescriptorSet + * @constructor + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + */ + function FileDescriptorSet(properties) { + this.file = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorSet file. + * @member {Array.} file + * @memberof google.protobuf.FileDescriptorSet + * @instance + */ + FileDescriptorSet.prototype.file = $util.emptyArray; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet instance + */ + FileDescriptorSet.create = function create(properties) { + return new FileDescriptorSet(properties); + }; + + /** + * Encodes the specified FileDescriptorSet message. 
Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.file != null && message.file.length) + for (var i = 0; i < message.file.length; ++i) + $root.google.protobuf.FileDescriptorProto.encode(message.file[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.file && message.file.length)) + message.file = []; + message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorSet message. 
+ * @function verify + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorSet.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.file != null && message.hasOwnProperty("file")) { + if (!Array.isArray(message.file)) + return "file: array expected"; + for (var i = 0; i < message.file.length; ++i) { + var error = $root.google.protobuf.FileDescriptorProto.verify(message.file[i]); + if (error) + return "file." + error; + } + } + return null; + }; + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + */ + FileDescriptorSet.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorSet) + return object; + var message = new $root.google.protobuf.FileDescriptorSet(); + if (object.file) { + if (!Array.isArray(object.file)) + throw TypeError(".google.protobuf.FileDescriptorSet.file: array expected"); + message.file = []; + for (var i = 0; i < object.file.length; ++i) { + if (typeof object.file[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorSet.file: object expected"); + message.file[i] = $root.google.protobuf.FileDescriptorProto.fromObject(object.file[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.FileDescriptorSet} message FileDescriptorSet + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorSet.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.file = []; + if (message.file && message.file.length) { + object.file = []; + for (var j = 0; j < message.file.length; ++j) + object.file[j] = $root.google.protobuf.FileDescriptorProto.toObject(message.file[j], options); + } + return object; + }; + + /** + * Converts this FileDescriptorSet to JSON. + * @function toJSON + * @memberof google.protobuf.FileDescriptorSet + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorSet.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FileDescriptorSet; + })(); + + protobuf.FileDescriptorProto = (function() { + + /** + * Properties of a FileDescriptorProto. 
+ * @memberof google.protobuf + * @interface IFileDescriptorProto + * @property {string|null} [name] FileDescriptorProto name + * @property {string|null} ["package"] FileDescriptorProto package + * @property {Array.|null} [dependency] FileDescriptorProto dependency + * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency + * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [messageType] FileDescriptorProto messageType + * @property {Array.|null} [enumType] FileDescriptorProto enumType + * @property {Array.|null} [service] FileDescriptorProto service + * @property {Array.|null} [extension] FileDescriptorProto extension + * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options + * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo + * @property {string|null} [syntax] FileDescriptorProto syntax + */ + + /** + * Constructs a new FileDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a FileDescriptorProto. + * @implements IFileDescriptorProto + * @constructor + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + */ + function FileDescriptorProto(properties) { + this.dependency = []; + this.publicDependency = []; + this.weakDependency = []; + this.messageType = []; + this.enumType = []; + this.service = []; + this.extension = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.name = ""; + + /** + * FileDescriptorProto package. 
+ * @member {string} package + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype["package"] = ""; + + /** + * FileDescriptorProto dependency. + * @member {Array.} dependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.dependency = $util.emptyArray; + + /** + * FileDescriptorProto publicDependency. + * @member {Array.} publicDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.publicDependency = $util.emptyArray; + + /** + * FileDescriptorProto weakDependency. + * @member {Array.} weakDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + + /** + * FileDescriptorProto messageType. + * @member {Array.} messageType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.messageType = $util.emptyArray; + + /** + * FileDescriptorProto enumType. + * @member {Array.} enumType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * FileDescriptorProto service. + * @member {Array.} service + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.service = $util.emptyArray; + + /** + * FileDescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.extension = $util.emptyArray; + + /** + * FileDescriptorProto options. + * @member {google.protobuf.IFileOptions|null|undefined} options + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.options = null; + + /** + * FileDescriptorProto sourceCodeInfo. 
+ * @member {google.protobuf.ISourceCodeInfo|null|undefined} sourceCodeInfo + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.sourceCodeInfo = null; + + /** + * FileDescriptorProto syntax. + * @member {string} syntax + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.syntax = ""; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto instance + */ + FileDescriptorProto.create = function create(properties) { + return new FileDescriptorProto(properties); + }; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); + if (message.dependency != null && message.dependency.length) + for (var i = 0; i < message.dependency.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.dependency[i]); + if (message.messageType != null && message.messageType.length) + for (var i = 0; i < message.messageType.length; ++i) + 
$root.google.protobuf.DescriptorProto.encode(message.messageType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.service != null && message.service.length) + for (var i = 0; i < message.service.length; ++i) + $root.google.protobuf.ServiceDescriptorProto.encode(message.service[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) + $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.publicDependency != null && message.publicDependency.length) + for (var i = 0; i < message.publicDependency.length; ++i) + writer.uint32(/* id 10, wireType 0 =*/80).int32(message.publicDependency[i]); + if (message.weakDependency != null && message.weakDependency.length) + for (var i = 0; i < message.weakDependency.length; ++i) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); + if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); + return writer; + }; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message["package"] = reader.string(); + break; + case 3: + if (!(message.dependency && message.dependency.length)) + message.dependency = []; + message.dependency.push(reader.string()); + break; + case 10: + if (!(message.publicDependency && message.publicDependency.length)) + message.publicDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.publicDependency.push(reader.int32()); + } else + message.publicDependency.push(reader.int32()); + break; + case 11: + if (!(message.weakDependency && message.weakDependency.length)) + message.weakDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.weakDependency.push(reader.int32()); + } else + message.weakDependency.push(reader.int32()); + break; + case 4: + if (!(message.messageType && message.messageType.length)) + message.messageType = []; + message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + if (!(message.service && message.service.length)) + message.service = []; + message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); + break; + case 9: + 
message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorProto message. + * @function verify + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message["package"] != null && message.hasOwnProperty("package")) + if (!$util.isString(message["package"])) + return "package: string expected"; + if (message.dependency != null && message.hasOwnProperty("dependency")) { + if (!Array.isArray(message.dependency)) + return "dependency: array expected"; + for (var i = 0; i < message.dependency.length; ++i) + if (!$util.isString(message.dependency[i])) + return "dependency: string[] expected"; + } + if (message.publicDependency != null && 
message.hasOwnProperty("publicDependency")) { + if (!Array.isArray(message.publicDependency)) + return "publicDependency: array expected"; + for (var i = 0; i < message.publicDependency.length; ++i) + if (!$util.isInteger(message.publicDependency[i])) + return "publicDependency: integer[] expected"; + } + if (message.weakDependency != null && message.hasOwnProperty("weakDependency")) { + if (!Array.isArray(message.weakDependency)) + return "weakDependency: array expected"; + for (var i = 0; i < message.weakDependency.length; ++i) + if (!$util.isInteger(message.weakDependency[i])) + return "weakDependency: integer[] expected"; + } + if (message.messageType != null && message.hasOwnProperty("messageType")) { + if (!Array.isArray(message.messageType)) + return "messageType: array expected"; + for (var i = 0; i < message.messageType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.messageType[i]); + if (error) + return "messageType." + error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." + error; + } + } + if (message.service != null && message.hasOwnProperty("service")) { + if (!Array.isArray(message.service)) + return "service: array expected"; + for (var i = 0; i < message.service.length; ++i) { + var error = $root.google.protobuf.ServiceDescriptorProto.verify(message.service[i]); + if (error) + return "service." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." 
+ error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.FileOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) { + var error = $root.google.protobuf.SourceCodeInfo.verify(message.sourceCodeInfo); + if (error) + return "sourceCodeInfo." + error; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + if (!$util.isString(message.syntax)) + return "syntax: string expected"; + return null; + }; + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + */ + FileDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorProto) + return object; + var message = new $root.google.protobuf.FileDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object["package"] != null) + message["package"] = String(object["package"]); + if (object.dependency) { + if (!Array.isArray(object.dependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.dependency: array expected"); + message.dependency = []; + for (var i = 0; i < object.dependency.length; ++i) + message.dependency[i] = String(object.dependency[i]); + } + if (object.publicDependency) { + if (!Array.isArray(object.publicDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.publicDependency: array expected"); + message.publicDependency = []; + for (var i = 0; i < object.publicDependency.length; ++i) + message.publicDependency[i] = object.publicDependency[i] | 0; + } + if (object.weakDependency) { + if (!Array.isArray(object.weakDependency)) + throw 
TypeError(".google.protobuf.FileDescriptorProto.weakDependency: array expected"); + message.weakDependency = []; + for (var i = 0; i < object.weakDependency.length; ++i) + message.weakDependency[i] = object.weakDependency[i] | 0; + } + if (object.messageType) { + if (!Array.isArray(object.messageType)) + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); + message.messageType = []; + for (var i = 0; i < object.messageType.length; ++i) { + if (typeof object.messageType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: object expected"); + message.messageType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.messageType[i]); + } + } + if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: array expected"); + message.enumType = []; + for (var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.service) { + if (!Array.isArray(object.service)) + throw TypeError(".google.protobuf.FileDescriptorProto.service: array expected"); + message.service = []; + for (var i = 0; i < object.service.length; ++i) { + if (typeof object.service[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.service: object expected"); + message.service[i] = $root.google.protobuf.ServiceDescriptorProto.fromObject(object.service[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.FileDescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof object.extension[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.extension: object 
expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FileOptions.fromObject(object.options); + } + if (object.sourceCodeInfo != null) { + if (typeof object.sourceCodeInfo !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.sourceCodeInfo: object expected"); + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.fromObject(object.sourceCodeInfo); + } + if (object.syntax != null) + message.syntax = String(object.syntax); + return message; + }; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.FileDescriptorProto} message FileDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.dependency = []; + object.messageType = []; + object.enumType = []; + object.service = []; + object.extension = []; + object.publicDependency = []; + object.weakDependency = []; + } + if (options.defaults) { + object.name = ""; + object["package"] = ""; + object.options = null; + object.sourceCodeInfo = null; + object.syntax = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message["package"] != null && message.hasOwnProperty("package")) + object["package"] = message["package"]; + if (message.dependency && message.dependency.length) { + object.dependency = []; + for (var j = 0; j < message.dependency.length; ++j) + 
object.dependency[j] = message.dependency[j]; + } + if (message.messageType && message.messageType.length) { + object.messageType = []; + for (var j = 0; j < message.messageType.length; ++j) + object.messageType[j] = $root.google.protobuf.DescriptorProto.toObject(message.messageType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.service && message.service.length) { + object.service = []; + for (var j = 0; j < message.service.length; ++j) + object.service[j] = $root.google.protobuf.ServiceDescriptorProto.toObject(message.service[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FileOptions.toObject(message.options, options); + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + object.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.toObject(message.sourceCodeInfo, options); + if (message.publicDependency && message.publicDependency.length) { + object.publicDependency = []; + for (var j = 0; j < message.publicDependency.length; ++j) + object.publicDependency[j] = message.publicDependency[j]; + } + if (message.weakDependency && message.weakDependency.length) { + object.weakDependency = []; + for (var j = 0; j < message.weakDependency.length; ++j) + object.weakDependency[j] = message.weakDependency[j]; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + object.syntax = message.syntax; + return object; + }; + + /** + * Converts this FileDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FileDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return FileDescriptorProto; + })(); + + protobuf.DescriptorProto = (function() { + + /** + * Properties of a DescriptorProto. + * @memberof google.protobuf + * @interface IDescriptorProto + * @property {string|null} [name] DescriptorProto name + * @property {Array.|null} [field] DescriptorProto field + * @property {Array.|null} [extension] DescriptorProto extension + * @property {Array.|null} [nestedType] DescriptorProto nestedType + * @property {Array.|null} [enumType] DescriptorProto enumType + * @property {Array.|null} [extensionRange] DescriptorProto extensionRange + * @property {Array.|null} [oneofDecl] DescriptorProto oneofDecl + * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options + * @property {Array.|null} [reservedRange] DescriptorProto reservedRange + * @property {Array.|null} [reservedName] DescriptorProto reservedName + */ + + /** + * Constructs a new DescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a DescriptorProto. + * @implements IDescriptorProto + * @constructor + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + */ + function DescriptorProto(properties) { + this.field = []; + this.extension = []; + this.nestedType = []; + this.enumType = []; + this.extensionRange = []; + this.oneofDecl = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DescriptorProto name. 
+ * @member {string} name + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.name = ""; + + /** + * DescriptorProto field. + * @member {Array.} field + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.field = $util.emptyArray; + + /** + * DescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extension = $util.emptyArray; + + /** + * DescriptorProto nestedType. + * @member {Array.} nestedType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.nestedType = $util.emptyArray; + + /** + * DescriptorProto enumType. + * @member {Array.} enumType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * DescriptorProto extensionRange. + * @member {Array.} extensionRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extensionRange = $util.emptyArray; + + /** + * DescriptorProto oneofDecl. + * @member {Array.} oneofDecl + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.oneofDecl = $util.emptyArray; + + /** + * DescriptorProto options. + * @member {google.protobuf.IMessageOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.options = null; + + /** + * DescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedRange = $util.emptyArray; + + /** + * DescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedName = $util.emptyArray; + + /** + * Creates a new DescriptorProto instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto} DescriptorProto instance + */ + DescriptorProto.create = function create(properties) { + return new DescriptorProto(properties); + }; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.field != null && message.field.length) + for (var i = 0; i < message.field.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.field[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.nestedType != null && message.nestedType.length) + for (var i = 0; i < message.nestedType.length; ++i) + $root.google.protobuf.DescriptorProto.encode(message.nestedType[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.extensionRange != null && message.extensionRange.length) + for (var i = 0; i < message.extensionRange.length; ++i) + $root.google.protobuf.DescriptorProto.ExtensionRange.encode(message.extensionRange[i], writer.uint32(/* id 5, wireType 2 
=*/42).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.oneofDecl != null && message.oneofDecl.length) + for (var i = 0; i < message.oneofDecl.length; ++i) + $root.google.protobuf.OneofDescriptorProto.encode(message.oneofDecl[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.DescriptorProto.ReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + return writer; + }; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + if (!(message.field && message.field.length)) + message.field = []; + message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + if (!(message.nestedType && message.nestedType.length)) + message.nestedType = []; + message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.extensionRange && message.extensionRange.length)) + message.extensionRange = []; + message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + if (!(message.oneofDecl && message.oneofDecl.length)) + message.oneofDecl = []; + 
message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.field != null && message.hasOwnProperty("field")) { + if (!Array.isArray(message.field)) + return "field: array expected"; + for (var i = 0; i < message.field.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.field[i]); + if (error) + return "field." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." + error; + } + } + if (message.nestedType != null && message.hasOwnProperty("nestedType")) { + if (!Array.isArray(message.nestedType)) + return "nestedType: array expected"; + for (var i = 0; i < message.nestedType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.nestedType[i]); + if (error) + return "nestedType." + error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." 
+ error; + } + } + if (message.extensionRange != null && message.hasOwnProperty("extensionRange")) { + if (!Array.isArray(message.extensionRange)) + return "extensionRange: array expected"; + for (var i = 0; i < message.extensionRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ExtensionRange.verify(message.extensionRange[i]); + if (error) + return "extensionRange." + error; + } + } + if (message.oneofDecl != null && message.hasOwnProperty("oneofDecl")) { + if (!Array.isArray(message.oneofDecl)) + return "oneofDecl: array expected"; + for (var i = 0; i < message.oneofDecl.length; ++i) { + var error = $root.google.protobuf.OneofDescriptorProto.verify(message.oneofDecl[i]); + if (error) + return "oneofDecl." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MessageOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto} DescriptorProto + */ + DescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto) + return object; + var message = new $root.google.protobuf.DescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.field) { + if (!Array.isArray(object.field)) + throw TypeError(".google.protobuf.DescriptorProto.field: array expected"); + message.field = []; + for (var i = 0; i < object.field.length; ++i) { + if (typeof object.field[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.field: object expected"); + message.field[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.field[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.DescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof object.extension[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extension: object expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.nestedType) { + if (!Array.isArray(object.nestedType)) + throw TypeError(".google.protobuf.DescriptorProto.nestedType: array expected"); + message.nestedType = []; + for (var i = 0; i < object.nestedType.length; ++i) { + if (typeof object.nestedType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.nestedType: object expected"); + message.nestedType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.nestedType[i]); + } + } + if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.DescriptorProto.enumType: array expected"); + message.enumType = []; + for 
(var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.extensionRange) { + if (!Array.isArray(object.extensionRange)) + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: array expected"); + message.extensionRange = []; + for (var i = 0; i < object.extensionRange.length; ++i) { + if (typeof object.extensionRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: object expected"); + message.extensionRange[i] = $root.google.protobuf.DescriptorProto.ExtensionRange.fromObject(object.extensionRange[i]); + } + } + if (object.oneofDecl) { + if (!Array.isArray(object.oneofDecl)) + throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: array expected"); + message.oneofDecl = []; + for (var i = 0; i < object.oneofDecl.length; ++i) { + if (typeof object.oneofDecl[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: object expected"); + message.oneofDecl[i] = $root.google.protobuf.OneofDescriptorProto.fromObject(object.oneofDecl[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MessageOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = 
$root.google.protobuf.DescriptorProto.ReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.DescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + } + return message; + }; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.DescriptorProto} message DescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.field = []; + object.nestedType = []; + object.enumType = []; + object.extensionRange = []; + object.extension = []; + object.oneofDecl = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.field && message.field.length) { + object.field = []; + for (var j = 0; j < message.field.length; ++j) + object.field[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.field[j], options); + } + if (message.nestedType && message.nestedType.length) { + object.nestedType = []; + for (var j = 0; j < message.nestedType.length; ++j) + object.nestedType[j] = $root.google.protobuf.DescriptorProto.toObject(message.nestedType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = 
$root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.extensionRange && message.extensionRange.length) { + object.extensionRange = []; + for (var j = 0; j < message.extensionRange.length; ++j) + object.extensionRange[j] = $root.google.protobuf.DescriptorProto.ExtensionRange.toObject(message.extensionRange[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MessageOptions.toObject(message.options, options); + if (message.oneofDecl && message.oneofDecl.length) { + object.oneofDecl = []; + for (var j = 0; j < message.oneofDecl.length; ++j) + object.oneofDecl[j] = $root.google.protobuf.OneofDescriptorProto.toObject(message.oneofDecl[j], options); + } + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.DescriptorProto.ReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; + }; + + /** + * Converts this DescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto + * @instance + * @returns {Object.} JSON object + */ + DescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + DescriptorProto.ExtensionRange = (function() { + + /** + * Properties of an ExtensionRange. 
+ * @memberof google.protobuf.DescriptorProto + * @interface IExtensionRange + * @property {number|null} [start] ExtensionRange start + * @property {number|null} [end] ExtensionRange end + * @property {google.protobuf.IExtensionRangeOptions|null} [options] ExtensionRange options + */ + + /** + * Constructs a new ExtensionRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents an ExtensionRange. + * @implements IExtensionRange + * @constructor + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + */ + function ExtensionRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.start = 0; + + /** + * ExtensionRange end. + * @member {number} end + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.end = 0; + + /** + * ExtensionRange options. + * @member {google.protobuf.IExtensionRangeOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.options = null; + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange instance + */ + ExtensionRange.create = function create(properties) { + return new ExtensionRange(properties); + }; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRange message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ExtensionRangeOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + */ + ExtensionRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ExtensionRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected"); + message.options = $root.google.protobuf.ExtensionRangeOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.ExtensionRange} message ExtensionRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + object.options = null; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ExtensionRangeOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this ExtensionRange to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + * @returns {Object.} JSON object + */ + ExtensionRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ExtensionRange; + })(); + + DescriptorProto.ReservedRange = (function() { + + /** + * Properties of a ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @interface IReservedRange + * @property {number|null} [start] ReservedRange start + * @property {number|null} [end] ReservedRange end + */ + + /** + * Constructs a new ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents a ReservedRange. 
+ * @implements IReservedRange + * @constructor + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + */ + function ReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReservedRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.start = 0; + + /** + * ReservedRange end. + * @member {number} end + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.end = 0; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange instance + */ + ReservedRange.create = function create(properties) { + return new ReservedRange(properties); + }; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReservedRange message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + */ + ReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ReservedRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.ReservedRange} message ReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this ReservedRange to JSON. + * @function toJSON + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + * @returns {Object.} JSON object + */ + ReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ReservedRange; + })(); + + return DescriptorProto; + })(); + + protobuf.ExtensionRangeOptions = (function() { + + /** + * Properties of an ExtensionRangeOptions. + * @memberof google.protobuf + * @interface IExtensionRangeOptions + * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption + */ + + /** + * Constructs a new ExtensionRangeOptions. + * @memberof google.protobuf + * @classdesc Represents an ExtensionRangeOptions. + * @implements IExtensionRangeOptions + * @constructor + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + */ + function ExtensionRangeOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRangeOptions uninterpretedOption. 
+ * @member {Array.} uninterpretedOption + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions instance + */ + ExtensionRangeOptions.create = function create(properties) { + return new ExtensionRangeOptions(properties); + }; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRangeOptions message. + * @function verify + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRangeOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + */ + ExtensionRangeOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ExtensionRangeOptions) + return object; + var message = new $root.google.protobuf.ExtensionRangeOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.ExtensionRangeOptions} message ExtensionRangeOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRangeOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @function toJSON + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + * @returns {Object.} JSON object + */ + ExtensionRangeOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return ExtensionRangeOptions; + })(); + + protobuf.FieldDescriptorProto = (function() { + + /** + * Properties of a FieldDescriptorProto. 
+ * @memberof google.protobuf + * @interface IFieldDescriptorProto + * @property {string|null} [name] FieldDescriptorProto name + * @property {number|null} [number] FieldDescriptorProto number + * @property {google.protobuf.FieldDescriptorProto.Label|null} [label] FieldDescriptorProto label + * @property {google.protobuf.FieldDescriptorProto.Type|null} [type] FieldDescriptorProto type + * @property {string|null} [typeName] FieldDescriptorProto typeName + * @property {string|null} [extendee] FieldDescriptorProto extendee + * @property {string|null} [defaultValue] FieldDescriptorProto defaultValue + * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex + * @property {string|null} [jsonName] FieldDescriptorProto jsonName + * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options + * @property {boolean|null} [proto3Optional] FieldDescriptorProto proto3Optional + */ + + /** + * Constructs a new FieldDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a FieldDescriptorProto. + * @implements IFieldDescriptorProto + * @constructor + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + */ + function FieldDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.name = ""; + + /** + * FieldDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.number = 0; + + /** + * FieldDescriptorProto label. 
+ * @member {google.protobuf.FieldDescriptorProto.Label} label + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.label = 1; + + /** + * FieldDescriptorProto type. + * @member {google.protobuf.FieldDescriptorProto.Type} type + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.type = 1; + + /** + * FieldDescriptorProto typeName. + * @member {string} typeName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.typeName = ""; + + /** + * FieldDescriptorProto extendee. + * @member {string} extendee + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.extendee = ""; + + /** + * FieldDescriptorProto defaultValue. + * @member {string} defaultValue + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.defaultValue = ""; + + /** + * FieldDescriptorProto oneofIndex. + * @member {number} oneofIndex + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.oneofIndex = 0; + + /** + * FieldDescriptorProto jsonName. + * @member {string} jsonName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.jsonName = ""; + + /** + * FieldDescriptorProto options. + * @member {google.protobuf.IFieldOptions|null|undefined} options + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.options = null; + + /** + * FieldDescriptorProto proto3Optional. + * @member {boolean} proto3Optional + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.proto3Optional = false; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto instance + */ + FieldDescriptorProto.create = function create(properties) { + return new FieldDescriptorProto(properties); + }; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); + if (message.number != null && Object.hasOwnProperty.call(message, "number")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); + if (message.label != null && Object.hasOwnProperty.call(message, "label")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); + if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); + if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); + if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); + if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); + if (message.proto3Optional != null && Object.hasOwnProperty.call(message, "proto3Optional")) + writer.uint32(/* id 17, wireType 0 =*/136).bool(message.proto3Optional); + return writer; + }; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32(); + break; + case 5: + message.type = reader.int32(); + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.label != null && message.hasOwnProperty("label")) + switch (message.label) { + default: + return "label: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + case 17: + case 18: + break; + } + if (message.typeName != null && message.hasOwnProperty("typeName")) + if (!$util.isString(message.typeName)) + return "typeName: string expected"; + if (message.extendee != null && message.hasOwnProperty("extendee")) + if (!$util.isString(message.extendee)) + return "extendee: string expected"; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + if (!$util.isString(message.defaultValue)) + return "defaultValue: string expected"; + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + if (!$util.isInteger(message.oneofIndex)) + return "oneofIndex: integer expected"; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + if (!$util.isString(message.jsonName)) + return "jsonName: string expected"; + if (message.options != null && message.hasOwnProperty("options")) 
{ + var error = $root.google.protobuf.FieldOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + if (typeof message.proto3Optional !== "boolean") + return "proto3Optional: boolean expected"; + return null; + }; + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + */ + FieldDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldDescriptorProto) + return object; + var message = new $root.google.protobuf.FieldDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + switch (object.label) { + case "LABEL_OPTIONAL": + case 1: + message.label = 1; + break; + case "LABEL_REQUIRED": + case 2: + message.label = 2; + break; + case "LABEL_REPEATED": + case 3: + message.label = 3; + break; + } + switch (object.type) { + case "TYPE_DOUBLE": + case 1: + message.type = 1; + break; + case "TYPE_FLOAT": + case 2: + message.type = 2; + break; + case "TYPE_INT64": + case 3: + message.type = 3; + break; + case "TYPE_UINT64": + case 4: + message.type = 4; + break; + case "TYPE_INT32": + case 5: + message.type = 5; + break; + case "TYPE_FIXED64": + case 6: + message.type = 6; + break; + case "TYPE_FIXED32": + case 7: + message.type = 7; + break; + case "TYPE_BOOL": + case 8: + message.type = 8; + break; + case "TYPE_STRING": + case 9: + message.type = 9; + break; + case "TYPE_GROUP": + case 10: + message.type = 10; + break; + case "TYPE_MESSAGE": + case 11: + message.type = 11; + break; + case "TYPE_BYTES": + case 12: + message.type = 12; + break; + case 
"TYPE_UINT32": + case 13: + message.type = 13; + break; + case "TYPE_ENUM": + case 14: + message.type = 14; + break; + case "TYPE_SFIXED32": + case 15: + message.type = 15; + break; + case "TYPE_SFIXED64": + case 16: + message.type = 16; + break; + case "TYPE_SINT32": + case 17: + message.type = 17; + break; + case "TYPE_SINT64": + case 18: + message.type = 18; + break; + } + if (object.typeName != null) + message.typeName = String(object.typeName); + if (object.extendee != null) + message.extendee = String(object.extendee); + if (object.defaultValue != null) + message.defaultValue = String(object.defaultValue); + if (object.oneofIndex != null) + message.oneofIndex = object.oneofIndex | 0; + if (object.jsonName != null) + message.jsonName = String(object.jsonName); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); + } + if (object.proto3Optional != null) + message.proto3Optional = Boolean(object.proto3Optional); + return message; + }; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.FieldDescriptorProto} message FieldDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.extendee = ""; + object.number = 0; + object.label = options.enums === String ? "LABEL_OPTIONAL" : 1; + object.type = options.enums === String ? 
"TYPE_DOUBLE" : 1; + object.typeName = ""; + object.defaultValue = ""; + object.options = null; + object.oneofIndex = 0; + object.jsonName = ""; + object.proto3Optional = false; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.extendee != null && message.hasOwnProperty("extendee")) + object.extendee = message.extendee; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.label != null && message.hasOwnProperty("label")) + object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; + if (message.typeName != null && message.hasOwnProperty("typeName")) + object.typeName = message.typeName; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + object.defaultValue = message.defaultValue; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FieldOptions.toObject(message.options, options); + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + object.oneofIndex = message.oneofIndex; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + object.jsonName = message.jsonName; + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + object.proto3Optional = message.proto3Optional; + return object; + }; + + /** + * Converts this FieldDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FieldDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FieldDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - v1beta1.TableModifiers = (function() { + /** + * Type enum. + * @name google.protobuf.FieldDescriptorProto.Type + * @enum {number} + * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value + * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value + * @property {number} TYPE_INT64=3 TYPE_INT64 value + * @property {number} TYPE_UINT64=4 TYPE_UINT64 value + * @property {number} TYPE_INT32=5 TYPE_INT32 value + * @property {number} TYPE_FIXED64=6 TYPE_FIXED64 value + * @property {number} TYPE_FIXED32=7 TYPE_FIXED32 value + * @property {number} TYPE_BOOL=8 TYPE_BOOL value + * @property {number} TYPE_STRING=9 TYPE_STRING value + * @property {number} TYPE_GROUP=10 TYPE_GROUP value + * @property {number} TYPE_MESSAGE=11 TYPE_MESSAGE value + * @property {number} TYPE_BYTES=12 TYPE_BYTES value + * @property {number} TYPE_UINT32=13 TYPE_UINT32 value + * @property {number} TYPE_ENUM=14 TYPE_ENUM value + * @property {number} TYPE_SFIXED32=15 TYPE_SFIXED32 value + * @property {number} TYPE_SFIXED64=16 TYPE_SFIXED64 value + * @property {number} TYPE_SINT32=17 TYPE_SINT32 value + * @property {number} TYPE_SINT64=18 TYPE_SINT64 value + */ + FieldDescriptorProto.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "TYPE_DOUBLE"] = 1; + values[valuesById[2] = "TYPE_FLOAT"] = 2; + values[valuesById[3] = "TYPE_INT64"] = 3; + values[valuesById[4] = "TYPE_UINT64"] = 4; + values[valuesById[5] = "TYPE_INT32"] = 5; + values[valuesById[6] = "TYPE_FIXED64"] = 6; + values[valuesById[7] = "TYPE_FIXED32"] = 7; + values[valuesById[8] = "TYPE_BOOL"] = 8; + values[valuesById[9] = "TYPE_STRING"] = 9; + values[valuesById[10] = "TYPE_GROUP"] = 10; + values[valuesById[11] = "TYPE_MESSAGE"] = 11; + 
values[valuesById[12] = "TYPE_BYTES"] = 12; + values[valuesById[13] = "TYPE_UINT32"] = 13; + values[valuesById[14] = "TYPE_ENUM"] = 14; + values[valuesById[15] = "TYPE_SFIXED32"] = 15; + values[valuesById[16] = "TYPE_SFIXED64"] = 16; + values[valuesById[17] = "TYPE_SINT32"] = 17; + values[valuesById[18] = "TYPE_SINT64"] = 18; + return values; + })(); - /** - * Properties of a TableModifiers. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableModifiers - * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime - */ + /** + * Label enum. + * @name google.protobuf.FieldDescriptorProto.Label + * @enum {number} + * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value + * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value + * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value + */ + FieldDescriptorProto.Label = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "LABEL_OPTIONAL"] = 1; + values[valuesById[2] = "LABEL_REQUIRED"] = 2; + values[valuesById[3] = "LABEL_REPEATED"] = 3; + return values; + })(); - /** - * Constructs a new TableModifiers. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableModifiers. - * @implements ITableModifiers - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set - */ - function TableModifiers(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + return FieldDescriptorProto; + })(); - /** - * TableModifiers snapshotTime. 
- * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @instance - */ - TableModifiers.prototype.snapshotTime = null; + protobuf.OneofDescriptorProto = (function() { - /** - * Creates a new TableModifiers instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers instance - */ - TableModifiers.create = function create(properties) { - return new TableModifiers(properties); - }; + /** + * Properties of an OneofDescriptorProto. + * @memberof google.protobuf + * @interface IOneofDescriptorProto + * @property {string|null} [name] OneofDescriptorProto name + * @property {google.protobuf.IOneofOptions|null} [options] OneofDescriptorProto options + */ - /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) - $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; + /** + * Constructs a new OneofDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an OneofDescriptorProto. 
+ * @implements IOneofDescriptorProto + * @constructor + * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + */ + function OneofDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * OneofDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.OneofDescriptorProto + * @instance + */ + OneofDescriptorProto.prototype.name = ""; - /** - * Decodes a TableModifiers message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; + /** + * OneofDescriptorProto options. + * @member {google.protobuf.IOneofOptions|null|undefined} options + * @memberof google.protobuf.OneofDescriptorProto + * @instance + */ + OneofDescriptorProto.prototype.options = null; - /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto instance + */ + OneofDescriptorProto.create = function create(properties) { + return new OneofDescriptorProto(properties); + }; - /** - * Verifies a TableModifiers message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableModifiers.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); - if (error) - return "snapshotTime." + error; - } - return null; - }; + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; - /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - */ - TableModifiers.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableModifiers) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); - if (object.snapshotTime != null) { - if (typeof object.snapshotTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshotTime: object expected"); - message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); - } - return message; - }; + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; - /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} message TableModifiers - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableModifiers.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.snapshotTime = null; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) - object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); - return object; - }; + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; - /** - * Converts this TableModifiers to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @instance - * @returns {Object.} JSON object - */ - TableModifiers.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - return TableModifiers; - })(); + /** + * Verifies an OneofDescriptorProto message. + * @function verify + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + OneofDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.OneofOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; - return v1beta1; - })(); + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + */ + OneofDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofDescriptorProto) + return object; + var message = new $root.google.protobuf.OneofDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.OneofOptions.fromObject(object.options); + } + return message; + }; - return storage; - })(); + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.OneofDescriptorProto} message OneofDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + OneofDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.OneofOptions.toObject(message.options, options); + return object; + }; - return bigquery; + /** + * Converts this OneofDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.OneofDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + OneofDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return OneofDescriptorProto; })(); - return cloud; - })(); + protobuf.EnumDescriptorProto = (function() { - google.api = (function() { + /** + * Properties of an EnumDescriptorProto. + * @memberof google.protobuf + * @interface IEnumDescriptorProto + * @property {string|null} [name] EnumDescriptorProto name + * @property {Array.|null} [value] EnumDescriptorProto value + * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options + * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange + * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + */ - /** - * Namespace api. - * @memberof google - * @namespace - */ - var api = {}; + /** + * Constructs a new EnumDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumDescriptorProto. + * @implements IEnumDescriptorProto + * @constructor + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + */ + function EnumDescriptorProto(properties) { + this.value = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - api.Http = (function() { + /** + * EnumDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.name = ""; /** - * Properties of a Http. - * @memberof google.api - * @interface IHttp - * @property {Array.|null} [rules] Http rules - * @property {boolean|null} [fullyDecodeReservedExpansion] Http fullyDecodeReservedExpansion + * EnumDescriptorProto value. 
+ * @member {Array.} value + * @memberof google.protobuf.EnumDescriptorProto + * @instance */ + EnumDescriptorProto.prototype.value = $util.emptyArray; /** - * Constructs a new Http. - * @memberof google.api - * @classdesc Represents a Http. - * @implements IHttp - * @constructor - * @param {google.api.IHttp=} [properties] Properties to set + * EnumDescriptorProto options. + * @member {google.protobuf.IEnumOptions|null|undefined} options + * @memberof google.protobuf.EnumDescriptorProto + * @instance */ - function Http(properties) { - this.rules = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + EnumDescriptorProto.prototype.options = null; /** - * Http rules. - * @member {Array.} rules - * @memberof google.api.Http + * EnumDescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.EnumDescriptorProto * @instance */ - Http.prototype.rules = $util.emptyArray; + EnumDescriptorProto.prototype.reservedRange = $util.emptyArray; /** - * Http fullyDecodeReservedExpansion. - * @member {boolean} fullyDecodeReservedExpansion - * @memberof google.api.Http + * EnumDescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.EnumDescriptorProto * @instance */ - Http.prototype.fullyDecodeReservedExpansion = false; + EnumDescriptorProto.prototype.reservedName = $util.emptyArray; /** - * Creates a new Http instance using the specified properties. + * Creates a new EnumDescriptorProto instance using the specified properties. 
* @function create - * @memberof google.api.Http + * @memberof google.protobuf.EnumDescriptorProto * @static - * @param {google.api.IHttp=} [properties] Properties to set - * @returns {google.api.Http} Http instance + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto instance */ - Http.create = function create(properties) { - return new Http(properties); + EnumDescriptorProto.create = function create(properties) { + return new EnumDescriptorProto(properties); }; /** - * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. * @function encode - * @memberof google.api.Http + * @memberof google.protobuf.EnumDescriptorProto * @static - * @param {google.api.IHttp} message Http message or plain object to encode + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Http.encode = function encode(message, writer) { + EnumDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.rules != null && message.rules.length) - for (var i = 0; i < message.rules.length; ++i) - $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.value != null && message.value.length) + for 
(var i = 0; i < message.value.length; ++i) + $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); return writer; }; /** - * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. * @function encodeDelimited - * @memberof google.api.Http + * @memberof google.protobuf.EnumDescriptorProto * @static - * @param {google.api.IHttp} message Http message or plain object to encode + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Http.encodeDelimited = function encodeDelimited(message, writer) { + EnumDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a Http message from the specified reader or buffer. + * Decodes an EnumDescriptorProto message from the specified reader or buffer. 
* @function decode - * @memberof google.api.Http + * @memberof google.protobuf.EnumDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.api.Http} Http + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Http.decode = function decode(reader, length) { + EnumDescriptorProto.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Http(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.rules && message.rules.length)) - message.rules = []; - message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + message.name = reader.string(); break; case 2: - message.fullyDecodeReservedExpansion = reader.bool(); + if (!(message.value && message.value.length)) + message.value = []; + message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); break; default: reader.skipType(tag & 7); break; } } - return message; + 
return message; + }; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumDescriptorProto message. + * @function verify + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.value != null && message.hasOwnProperty("value")) { + if (!Array.isArray(message.value)) + return "value: array expected"; + for (var i = 0; i < message.value.length; ++i) { + var error = $root.google.protobuf.EnumValueDescriptorProto.verify(message.value[i]); + if (error) + return "value." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumOptions.verify(message.options); + if (error) + return "options." 
+ error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + */ + EnumDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.value) { + if (!Array.isArray(object.value)) + throw TypeError(".google.protobuf.EnumDescriptorProto.value: array expected"); + message.value = []; + for (var i = 0; i < object.value.length; ++i) { + if (typeof object.value[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.value: object expected"); + message.value[i] = $root.google.protobuf.EnumValueDescriptorProto.fromObject(object.value[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.options: object expected"); + message.options = 
$root.google.protobuf.EnumOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + } + return message; + }; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.EnumDescriptorProto} message EnumDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.value = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.value && message.value.length) { + object.value = []; + for (var j = 0; j < message.value.length; ++j) + object.value[j] = $root.google.protobuf.EnumValueDescriptorProto.toObject(message.value[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumOptions.toObject(message.options, options); + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; }; /** - * Decodes a Http message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.api.Http - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.Http} Http - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * Converts this EnumDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto + * @instance + * @returns {Object.} JSON object */ - Http.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); + EnumDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - /** - * Verifies a Http message. - * @function verify - * @memberof google.api.Http - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Http.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.rules != null && message.hasOwnProperty("rules")) { - if (!Array.isArray(message.rules)) - return "rules: array expected"; - for (var i = 0; i < message.rules.length; ++i) { - var error = $root.google.api.HttpRule.verify(message.rules[i]); - if (error) - return "rules." + error; - } + EnumDescriptorProto.EnumReservedRange = (function() { + + /** + * Properties of an EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @interface IEnumReservedRange + * @property {number|null} [start] EnumReservedRange start + * @property {number|null} [end] EnumReservedRange end + */ + + /** + * Constructs a new EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @classdesc Represents an EnumReservedRange. 
+ * @implements IEnumReservedRange + * @constructor + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + */ + function EnumReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; } - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) - if (typeof message.fullyDecodeReservedExpansion !== "boolean") - return "fullyDecodeReservedExpansion: boolean expected"; - return null; - }; - /** - * Creates a Http message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.api.Http - * @static - * @param {Object.} object Plain object - * @returns {google.api.Http} Http - */ - Http.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.Http) - return object; - var message = new $root.google.api.Http(); - if (object.rules) { - if (!Array.isArray(object.rules)) - throw TypeError(".google.api.Http.rules: array expected"); - message.rules = []; - for (var i = 0; i < object.rules.length; ++i) { - if (typeof object.rules[i] !== "object") - throw TypeError(".google.api.Http.rules: object expected"); - message.rules[i] = $root.google.api.HttpRule.fromObject(object.rules[i]); + /** + * EnumReservedRange start. + * @member {number} start + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.start = 0; + + /** + * EnumReservedRange end. + * @member {number} end + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.end = 0; + + /** + * Creates a new EnumReservedRange instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange instance + */ + EnumReservedRange.create = function create(properties) { + return new EnumReservedRange(properties); + }; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @function encode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumReservedRange message. + * @function verify + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + */ + EnumReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto.EnumReservedRange) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.EnumReservedRange} message EnumReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; } - } - if (object.fullyDecodeReservedExpansion != null) - message.fullyDecodeReservedExpansion = Boolean(object.fullyDecodeReservedExpansion); - return message; - }; + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; - /** - * Creates a plain object from a Http message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.api.Http - * @static - * @param {google.api.Http} message Http - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Http.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.rules = []; - if (options.defaults) - object.fullyDecodeReservedExpansion = false; - if (message.rules && message.rules.length) { - object.rules = []; - for (var j = 0; j < message.rules.length; ++j) - object.rules[j] = $root.google.api.HttpRule.toObject(message.rules[j], options); - } - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) - object.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; - return object; - }; + /** + * Converts this EnumReservedRange to JSON. + * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + * @returns {Object.} JSON object + */ + EnumReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this Http to JSON. - * @function toJSON - * @memberof google.api.Http - * @instance - * @returns {Object.} JSON object - */ - Http.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + return EnumReservedRange; + })(); - return Http; + return EnumDescriptorProto; })(); - api.HttpRule = (function() { + protobuf.EnumValueDescriptorProto = (function() { /** - * Properties of a HttpRule. 
- * @memberof google.api - * @interface IHttpRule - * @property {string|null} [selector] HttpRule selector - * @property {string|null} [get] HttpRule get - * @property {string|null} [put] HttpRule put - * @property {string|null} [post] HttpRule post - * @property {string|null} ["delete"] HttpRule delete - * @property {string|null} [patch] HttpRule patch - * @property {google.api.ICustomHttpPattern|null} [custom] HttpRule custom - * @property {string|null} [body] HttpRule body - * @property {string|null} [responseBody] HttpRule responseBody - * @property {Array.|null} [additionalBindings] HttpRule additionalBindings + * Properties of an EnumValueDescriptorProto. + * @memberof google.protobuf + * @interface IEnumValueDescriptorProto + * @property {string|null} [name] EnumValueDescriptorProto name + * @property {number|null} [number] EnumValueDescriptorProto number + * @property {google.protobuf.IEnumValueOptions|null} [options] EnumValueDescriptorProto options */ /** - * Constructs a new HttpRule. - * @memberof google.api - * @classdesc Represents a HttpRule. - * @implements IHttpRule + * Constructs a new EnumValueDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumValueDescriptorProto. + * @implements IEnumValueDescriptorProto * @constructor - * @param {google.api.IHttpRule=} [properties] Properties to set + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set */ - function HttpRule(properties) { - this.additionalBindings = []; + function EnumValueDescriptorProto(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -9833,209 +17630,101 @@ } /** - * HttpRule selector. - * @member {string} selector - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.selector = ""; - - /** - * HttpRule get. 
- * @member {string|null|undefined} get - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.get = null; - - /** - * HttpRule put. - * @member {string|null|undefined} put - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.put = null; - - /** - * HttpRule post. - * @member {string|null|undefined} post - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.post = null; - - /** - * HttpRule delete. - * @member {string|null|undefined} delete - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype["delete"] = null; - - /** - * HttpRule patch. - * @member {string|null|undefined} patch - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.patch = null; - - /** - * HttpRule custom. - * @member {google.api.ICustomHttpPattern|null|undefined} custom - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.custom = null; - - /** - * HttpRule body. - * @member {string} body - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.body = ""; - - /** - * HttpRule responseBody. - * @member {string} responseBody - * @memberof google.api.HttpRule + * EnumValueDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.EnumValueDescriptorProto * @instance */ - HttpRule.prototype.responseBody = ""; + EnumValueDescriptorProto.prototype.name = ""; /** - * HttpRule additionalBindings. - * @member {Array.} additionalBindings - * @memberof google.api.HttpRule + * EnumValueDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.EnumValueDescriptorProto * @instance */ - HttpRule.prototype.additionalBindings = $util.emptyArray; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; + EnumValueDescriptorProto.prototype.number = 0; /** - * HttpRule pattern. 
- * @member {"get"|"put"|"post"|"delete"|"patch"|"custom"|undefined} pattern - * @memberof google.api.HttpRule + * EnumValueDescriptorProto options. + * @member {google.protobuf.IEnumValueOptions|null|undefined} options + * @memberof google.protobuf.EnumValueDescriptorProto * @instance */ - Object.defineProperty(HttpRule.prototype, "pattern", { - get: $util.oneOfGetter($oneOfFields = ["get", "put", "post", "delete", "patch", "custom"]), - set: $util.oneOfSetter($oneOfFields) - }); + EnumValueDescriptorProto.prototype.options = null; /** - * Creates a new HttpRule instance using the specified properties. + * Creates a new EnumValueDescriptorProto instance using the specified properties. * @function create - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static - * @param {google.api.IHttpRule=} [properties] Properties to set - * @returns {google.api.HttpRule} HttpRule instance + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto instance */ - HttpRule.create = function create(properties) { - return new HttpRule(properties); + EnumValueDescriptorProto.create = function create(properties) { + return new EnumValueDescriptorProto(properties); }; /** - * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
* @function encode - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static - * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - HttpRule.encode = function encode(message, writer) { + EnumValueDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); - if (message.get != null && Object.hasOwnProperty.call(message, "get")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); - if (message.put != null && Object.hasOwnProperty.call(message, "put")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); - if (message.post != null && Object.hasOwnProperty.call(message, "post")) - writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); - if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); - if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); - if (message.body != null && Object.hasOwnProperty.call(message, "body")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); - if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) - $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.additionalBindings != null && message.additionalBindings.length) - for (var i = 0; i < message.additionalBindings.length; ++i) - $root.google.api.HttpRule.encode(message.additionalBindings[i], 
writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); - if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) - writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.number != null && Object.hasOwnProperty.call(message, "number")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; /** - * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. * @function encodeDelimited - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static - * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - HttpRule.encodeDelimited = function encodeDelimited(message, writer) { + EnumValueDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a HttpRule message from the specified reader or buffer. + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. 
* @function decode - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.api.HttpRule} HttpRule + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - HttpRule.decode = function decode(reader, length) { + EnumValueDescriptorProto.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.selector = reader.string(); + message.name = reader.string(); break; case 2: - message.get = reader.string(); + message.number = reader.int32(); break; case 3: - message.put = reader.string(); - break; - case 4: - message.post = reader.string(); - break; - case 5: - message["delete"] = reader.string(); - break; - case 6: - message.patch = reader.string(); - break; - case 8: - message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); - break; - case 7: - message.body = reader.string(); - break; - case 12: - message.responseBody = reader.string(); - break; - case 11: - if (!(message.additionalBindings && message.additionalBindings.length)) - message.additionalBindings = []; - message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ 
-10046,240 +17735,132 @@ }; /** - * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.HttpRule} HttpRule + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - HttpRule.decodeDelimited = function decodeDelimited(reader) { + EnumValueDescriptorProto.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a HttpRule message. + * Verifies an EnumValueDescriptorProto message. 
* @function verify - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - HttpRule.verify = function verify(message) { + EnumValueDescriptorProto.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - var properties = {}; - if (message.selector != null && message.hasOwnProperty("selector")) - if (!$util.isString(message.selector)) - return "selector: string expected"; - if (message.get != null && message.hasOwnProperty("get")) { - properties.pattern = 1; - if (!$util.isString(message.get)) - return "get: string expected"; - } - if (message.put != null && message.hasOwnProperty("put")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.put)) - return "put: string expected"; - } - if (message.post != null && message.hasOwnProperty("post")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.post)) - return "post: string expected"; - } - if (message["delete"] != null && message.hasOwnProperty("delete")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message["delete"])) - return "delete: string expected"; - } - if (message.patch != null && message.hasOwnProperty("patch")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.patch)) - return "patch: string expected"; - } - if (message.custom != null && message.hasOwnProperty("custom")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - { - var error = $root.google.api.CustomHttpPattern.verify(message.custom); - if (error) - return "custom." 
+ error; - } - } - if (message.body != null && message.hasOwnProperty("body")) - if (!$util.isString(message.body)) - return "body: string expected"; - if (message.responseBody != null && message.hasOwnProperty("responseBody")) - if (!$util.isString(message.responseBody)) - return "responseBody: string expected"; - if (message.additionalBindings != null && message.hasOwnProperty("additionalBindings")) { - if (!Array.isArray(message.additionalBindings)) - return "additionalBindings: array expected"; - for (var i = 0; i < message.additionalBindings.length; ++i) { - var error = $root.google.api.HttpRule.verify(message.additionalBindings[i]); - if (error) - return "additionalBindings." + error; - } + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumValueOptions.verify(message.options); + if (error) + return "options." + error; } return null; }; /** - * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static * @param {Object.} object Plain object - * @returns {google.api.HttpRule} HttpRule + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto */ - HttpRule.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.HttpRule) + EnumValueDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueDescriptorProto) return object; - var message = new $root.google.api.HttpRule(); - if (object.selector != null) - message.selector = String(object.selector); - if (object.get != null) - message.get = String(object.get); - if (object.put != null) - message.put = String(object.put); - if (object.post != null) - message.post = String(object.post); - if (object["delete"] != null) - message["delete"] = String(object["delete"]); - if (object.patch != null) - message.patch = String(object.patch); - if (object.custom != null) { - if (typeof object.custom !== "object") - throw TypeError(".google.api.HttpRule.custom: object expected"); - message.custom = $root.google.api.CustomHttpPattern.fromObject(object.custom); - } - if (object.body != null) - message.body = String(object.body); - if (object.responseBody != null) - message.responseBody = String(object.responseBody); - if (object.additionalBindings) { - if (!Array.isArray(object.additionalBindings)) - throw TypeError(".google.api.HttpRule.additionalBindings: array expected"); - message.additionalBindings = []; - for (var i = 0; i < object.additionalBindings.length; ++i) { - if (typeof object.additionalBindings[i] !== "object") - throw TypeError(".google.api.HttpRule.additionalBindings: object expected"); - message.additionalBindings[i] = $root.google.api.HttpRule.fromObject(object.additionalBindings[i]); - } + var message = new $root.google.protobuf.EnumValueDescriptorProto(); + if (object.name != null) + 
message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.EnumValueOptions.fromObject(object.options); } return message; }; /** - * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. * @function toObject - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @static - * @param {google.api.HttpRule} message HttpRule + * @param {google.protobuf.EnumValueDescriptorProto} message EnumValueDescriptorProto * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - HttpRule.toObject = function toObject(message, options) { + EnumValueDescriptorProto.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.additionalBindings = []; if (options.defaults) { - object.selector = ""; - object.body = ""; - object.responseBody = ""; - } - if (message.selector != null && message.hasOwnProperty("selector")) - object.selector = message.selector; - if (message.get != null && message.hasOwnProperty("get")) { - object.get = message.get; - if (options.oneofs) - object.pattern = "get"; - } - if (message.put != null && message.hasOwnProperty("put")) { - object.put = message.put; - if (options.oneofs) - object.pattern = "put"; - } - if (message.post != null && message.hasOwnProperty("post")) { - object.post = message.post; - if (options.oneofs) - object.pattern = "post"; - } - if (message["delete"] != null && message.hasOwnProperty("delete")) { - object["delete"] = message["delete"]; - if (options.oneofs) - 
object.pattern = "delete"; - } - if (message.patch != null && message.hasOwnProperty("patch")) { - object.patch = message.patch; - if (options.oneofs) - object.pattern = "patch"; - } - if (message.body != null && message.hasOwnProperty("body")) - object.body = message.body; - if (message.custom != null && message.hasOwnProperty("custom")) { - object.custom = $root.google.api.CustomHttpPattern.toObject(message.custom, options); - if (options.oneofs) - object.pattern = "custom"; - } - if (message.additionalBindings && message.additionalBindings.length) { - object.additionalBindings = []; - for (var j = 0; j < message.additionalBindings.length; ++j) - object.additionalBindings[j] = $root.google.api.HttpRule.toObject(message.additionalBindings[j], options); + object.name = ""; + object.number = 0; + object.options = null; } - if (message.responseBody != null && message.hasOwnProperty("responseBody")) - object.responseBody = message.responseBody; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumValueOptions.toObject(message.options, options); return object; }; /** - * Converts this HttpRule to JSON. + * Converts this EnumValueDescriptorProto to JSON. * @function toJSON - * @memberof google.api.HttpRule + * @memberof google.protobuf.EnumValueDescriptorProto * @instance * @returns {Object.} JSON object */ - HttpRule.prototype.toJSON = function toJSON() { + EnumValueDescriptorProto.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return HttpRule; + return EnumValueDescriptorProto; })(); - api.CustomHttpPattern = (function() { + protobuf.ServiceDescriptorProto = (function() { /** - * Properties of a CustomHttpPattern. 
- * @memberof google.api - * @interface ICustomHttpPattern - * @property {string|null} [kind] CustomHttpPattern kind - * @property {string|null} [path] CustomHttpPattern path + * Properties of a ServiceDescriptorProto. + * @memberof google.protobuf + * @interface IServiceDescriptorProto + * @property {string|null} [name] ServiceDescriptorProto name + * @property {Array.|null} [method] ServiceDescriptorProto method + * @property {google.protobuf.IServiceOptions|null} [options] ServiceDescriptorProto options */ /** - * Constructs a new CustomHttpPattern. - * @memberof google.api - * @classdesc Represents a CustomHttpPattern. - * @implements ICustomHttpPattern + * Constructs a new ServiceDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a ServiceDescriptorProto. + * @implements IServiceDescriptorProto * @constructor - * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set */ - function CustomHttpPattern(properties) { + function ServiceDescriptorProto(properties) { + this.method = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -10287,88 +17868,104 @@ } /** - * CustomHttpPattern kind. - * @member {string} kind - * @memberof google.api.CustomHttpPattern + * ServiceDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.ServiceDescriptorProto * @instance */ - CustomHttpPattern.prototype.kind = ""; + ServiceDescriptorProto.prototype.name = ""; /** - * CustomHttpPattern path. - * @member {string} path - * @memberof google.api.CustomHttpPattern + * ServiceDescriptorProto method. 
+ * @member {Array.} method + * @memberof google.protobuf.ServiceDescriptorProto * @instance */ - CustomHttpPattern.prototype.path = ""; + ServiceDescriptorProto.prototype.method = $util.emptyArray; /** - * Creates a new CustomHttpPattern instance using the specified properties. + * ServiceDescriptorProto options. + * @member {google.protobuf.IServiceOptions|null|undefined} options + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.options = null; + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. * @function create - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static - * @param {google.api.ICustomHttpPattern=} [properties] Properties to set - * @returns {google.api.CustomHttpPattern} CustomHttpPattern instance + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto instance */ - CustomHttpPattern.create = function create(properties) { - return new CustomHttpPattern(properties); + ServiceDescriptorProto.create = function create(properties) { + return new ServiceDescriptorProto(properties); }; /** - * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. 
* @function encode - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static - * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - CustomHttpPattern.encode = function encode(message, writer) { + ServiceDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); - if (message.path != null && Object.hasOwnProperty.call(message, "path")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.method != null && message.method.length) + for (var i = 0; i < message.method.length; ++i) + $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; /** - * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static - * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - CustomHttpPattern.encodeDelimited = function encodeDelimited(message, writer) { + ServiceDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a CustomHttpPattern message from the specified reader or buffer. + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. * @function decode - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CustomHttpPattern.decode = function decode(reader, length) { + ServiceDescriptorProto.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.kind = reader.string(); + message.name = reader.string(); break; case 2: - message.path = reader.string(); + if (!(message.method && message.method.length)) + message.method = []; + message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -10379,148 +17976,152 @@ }; /** - * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CustomHttpPattern.decodeDelimited = function decodeDelimited(reader) { + ServiceDescriptorProto.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a CustomHttpPattern message. + * Verifies a ServiceDescriptorProto message. 
* @function verify - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - CustomHttpPattern.verify = function verify(message) { + ServiceDescriptorProto.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.kind != null && message.hasOwnProperty("kind")) - if (!$util.isString(message.kind)) - return "kind: string expected"; - if (message.path != null && message.hasOwnProperty("path")) - if (!$util.isString(message.path)) - return "path: string expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.method != null && message.hasOwnProperty("method")) { + if (!Array.isArray(message.method)) + return "method: array expected"; + for (var i = 0; i < message.method.length; ++i) { + var error = $root.google.protobuf.MethodDescriptorProto.verify(message.method[i]); + if (error) + return "method." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ServiceOptions.verify(message.options); + if (error) + return "options." + error; + } return null; }; /** - * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static * @param {Object.} object Plain object - * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto */ - CustomHttpPattern.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.CustomHttpPattern) + ServiceDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceDescriptorProto) return object; - var message = new $root.google.api.CustomHttpPattern(); - if (object.kind != null) - message.kind = String(object.kind); - if (object.path != null) - message.path = String(object.path); + var message = new $root.google.protobuf.ServiceDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.method) { + if (!Array.isArray(object.method)) + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: array expected"); + message.method = []; + for (var i = 0; i < object.method.length; ++i) { + if (typeof object.method[i] !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: object expected"); + message.method[i] = $root.google.protobuf.MethodDescriptorProto.fromObject(object.method[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.ServiceOptions.fromObject(object.options); + } return message; }; /** - * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @static - * @param {google.api.CustomHttpPattern} message CustomHttpPattern + * @param {google.protobuf.ServiceDescriptorProto} message ServiceDescriptorProto * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - CustomHttpPattern.toObject = function toObject(message, options) { + ServiceDescriptorProto.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.method = []; if (options.defaults) { - object.kind = ""; - object.path = ""; + object.name = ""; + object.options = null; } - if (message.kind != null && message.hasOwnProperty("kind")) - object.kind = message.kind; - if (message.path != null && message.hasOwnProperty("path")) - object.path = message.path; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.method && message.method.length) { + object.method = []; + for (var j = 0; j < message.method.length; ++j) + object.method[j] = $root.google.protobuf.MethodDescriptorProto.toObject(message.method[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ServiceOptions.toObject(message.options, options); return object; }; /** - * Converts this CustomHttpPattern to JSON. + * Converts this ServiceDescriptorProto to JSON. * @function toJSON - * @memberof google.api.CustomHttpPattern + * @memberof google.protobuf.ServiceDescriptorProto * @instance * @returns {Object.} JSON object */ - CustomHttpPattern.prototype.toJSON = function toJSON() { + ServiceDescriptorProto.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return CustomHttpPattern; - })(); - - /** - * FieldBehavior enum. 
- * @name google.api.FieldBehavior - * @enum {number} - * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value - * @property {number} OPTIONAL=1 OPTIONAL value - * @property {number} REQUIRED=2 REQUIRED value - * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value - * @property {number} INPUT_ONLY=4 INPUT_ONLY value - * @property {number} IMMUTABLE=5 IMMUTABLE value - * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value - */ - api.FieldBehavior = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "FIELD_BEHAVIOR_UNSPECIFIED"] = 0; - values[valuesById[1] = "OPTIONAL"] = 1; - values[valuesById[2] = "REQUIRED"] = 2; - values[valuesById[3] = "OUTPUT_ONLY"] = 3; - values[valuesById[4] = "INPUT_ONLY"] = 4; - values[valuesById[5] = "IMMUTABLE"] = 5; - values[valuesById[6] = "UNORDERED_LIST"] = 6; - return values; + return ServiceDescriptorProto; })(); - api.ResourceDescriptor = (function() { + protobuf.MethodDescriptorProto = (function() { /** - * Properties of a ResourceDescriptor. - * @memberof google.api - * @interface IResourceDescriptor - * @property {string|null} [type] ResourceDescriptor type - * @property {Array.|null} [pattern] ResourceDescriptor pattern - * @property {string|null} [nameField] ResourceDescriptor nameField - * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history - * @property {string|null} [plural] ResourceDescriptor plural - * @property {string|null} [singular] ResourceDescriptor singular - * @property {Array.|null} [style] ResourceDescriptor style + * Properties of a MethodDescriptorProto. 
+ * @memberof google.protobuf + * @interface IMethodDescriptorProto + * @property {string|null} [name] MethodDescriptorProto name + * @property {string|null} [inputType] MethodDescriptorProto inputType + * @property {string|null} [outputType] MethodDescriptorProto outputType + * @property {google.protobuf.IMethodOptions|null} [options] MethodDescriptorProto options + * @property {boolean|null} [clientStreaming] MethodDescriptorProto clientStreaming + * @property {boolean|null} [serverStreaming] MethodDescriptorProto serverStreaming */ /** - * Constructs a new ResourceDescriptor. - * @memberof google.api - * @classdesc Represents a ResourceDescriptor. - * @implements IResourceDescriptor + * Constructs a new MethodDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a MethodDescriptorProto. + * @implements IMethodDescriptorProto * @constructor - * @param {google.api.IResourceDescriptor=} [properties] Properties to set + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set */ - function ResourceDescriptor(properties) { - this.pattern = []; - this.style = []; + function MethodDescriptorProto(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -10528,167 +18129,140 @@ } /** - * ResourceDescriptor type. - * @member {string} type - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.type = ""; - - /** - * ResourceDescriptor pattern. - * @member {Array.} pattern - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.pattern = $util.emptyArray; + MethodDescriptorProto.prototype.name = ""; /** - * ResourceDescriptor nameField. - * @member {string} nameField - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto inputType. 
+ * @member {string} inputType + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.nameField = ""; + MethodDescriptorProto.prototype.inputType = ""; /** - * ResourceDescriptor history. - * @member {google.api.ResourceDescriptor.History} history - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto outputType. + * @member {string} outputType + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.history = 0; + MethodDescriptorProto.prototype.outputType = ""; /** - * ResourceDescriptor plural. - * @member {string} plural - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto options. + * @member {google.protobuf.IMethodOptions|null|undefined} options + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.plural = ""; + MethodDescriptorProto.prototype.options = null; /** - * ResourceDescriptor singular. - * @member {string} singular - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto clientStreaming. + * @member {boolean} clientStreaming + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.singular = ""; + MethodDescriptorProto.prototype.clientStreaming = false; /** - * ResourceDescriptor style. - * @member {Array.} style - * @memberof google.api.ResourceDescriptor + * MethodDescriptorProto serverStreaming. + * @member {boolean} serverStreaming + * @memberof google.protobuf.MethodDescriptorProto * @instance */ - ResourceDescriptor.prototype.style = $util.emptyArray; + MethodDescriptorProto.prototype.serverStreaming = false; /** - * Creates a new ResourceDescriptor instance using the specified properties. + * Creates a new MethodDescriptorProto instance using the specified properties. 
* @function create - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static - * @param {google.api.IResourceDescriptor=} [properties] Properties to set - * @returns {google.api.ResourceDescriptor} ResourceDescriptor instance + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto instance */ - ResourceDescriptor.create = function create(properties) { - return new ResourceDescriptor(properties); + MethodDescriptorProto.create = function create(properties) { + return new MethodDescriptorProto(properties); }; /** - * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. * @function encode - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static - * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ResourceDescriptor.encode = function encode(message, writer) { + MethodDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.pattern != null && message.pattern.length) - for (var i = 0; i < message.pattern.length; ++i) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); - if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) - writer.uint32(/* id 3, 
wireType 2 =*/26).string(message.nameField); - if (message.history != null && Object.hasOwnProperty.call(message, "history")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); - if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); - if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); - if (message.style != null && message.style.length) { - writer.uint32(/* id 10, wireType 2 =*/82).fork(); - for (var i = 0; i < message.style.length; ++i) - writer.int32(message.style[i]); - writer.ldelim(); - } + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); + if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); + if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); return writer; }; /** - * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * Encodes the specified MethodDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. * @function encodeDelimited - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static - * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ResourceDescriptor.encodeDelimited = function encodeDelimited(message, writer) { + MethodDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ResourceDescriptor message from the specified reader or buffer. + * Decodes a MethodDescriptorProto message from the specified reader or buffer. * @function decode - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceDescriptor.decode = function decode(reader, length) { + MethodDescriptorProto.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.type = reader.string(); + message.name = reader.string(); break; case 2: - if (!(message.pattern && message.pattern.length)) - message.pattern = []; - message.pattern.push(reader.string()); + message.inputType = reader.string(); break; case 3: - message.nameField = reader.string(); + message.outputType = reader.string(); break; case 4: - message.history = reader.int32(); + message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); break; case 5: - message.plural = reader.string(); + message.clientStreaming = reader.bool(); break; case 6: - message.singular = reader.string(); - break; - case 10: - if (!(message.style && message.style.length)) - message.style = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.style.push(reader.int32()); - } else - message.style.push(reader.int32()); + message.serverStreaming = reader.bool(); break; default: reader.skipType(tag & 7); @@ -10699,335 +18273,531 @@ }; /** - * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceDescriptor.decodeDelimited = function decodeDelimited(reader) { + MethodDescriptorProto.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ResourceDescriptor message. + * Verifies a MethodDescriptorProto message. * @function verify - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ResourceDescriptor.verify = function verify(message) { + MethodDescriptorProto.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.type != null && message.hasOwnProperty("type")) - if (!$util.isString(message.type)) - return "type: string expected"; - if (message.pattern != null && message.hasOwnProperty("pattern")) { - if (!Array.isArray(message.pattern)) - return "pattern: array expected"; - for (var i = 0; i < message.pattern.length; ++i) - if (!$util.isString(message.pattern[i])) - return "pattern: string[] expected"; - } - if (message.nameField != null && message.hasOwnProperty("nameField")) - if (!$util.isString(message.nameField)) - return "nameField: string expected"; - if (message.history != null && message.hasOwnProperty("history")) - switch (message.history) { - default: - return "history: enum 
value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.plural != null && message.hasOwnProperty("plural")) - if (!$util.isString(message.plural)) - return "plural: string expected"; - if (message.singular != null && message.hasOwnProperty("singular")) - if (!$util.isString(message.singular)) - return "singular: string expected"; - if (message.style != null && message.hasOwnProperty("style")) { - if (!Array.isArray(message.style)) - return "style: array expected"; - for (var i = 0; i < message.style.length; ++i) - switch (message.style[i]) { - default: - return "style: enum value[] expected"; - case 0: - case 1: - break; - } - } - return null; - }; - - /** - * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.api.ResourceDescriptor - * @static - * @param {Object.} object Plain object - * @returns {google.api.ResourceDescriptor} ResourceDescriptor - */ - ResourceDescriptor.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.ResourceDescriptor) - return object; - var message = new $root.google.api.ResourceDescriptor(); - if (object.type != null) - message.type = String(object.type); - if (object.pattern) { - if (!Array.isArray(object.pattern)) - throw TypeError(".google.api.ResourceDescriptor.pattern: array expected"); - message.pattern = []; - for (var i = 0; i < object.pattern.length; ++i) - message.pattern[i] = String(object.pattern[i]); - } - if (object.nameField != null) - message.nameField = String(object.nameField); - switch (object.history) { - case "HISTORY_UNSPECIFIED": - case 0: - message.history = 0; - break; - case "ORIGINALLY_SINGLE_PATTERN": - case 1: - message.history = 1; - break; - case "FUTURE_MULTI_PATTERN": - case 2: - message.history = 2; - break; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if 
(message.inputType != null && message.hasOwnProperty("inputType")) + if (!$util.isString(message.inputType)) + return "inputType: string expected"; + if (message.outputType != null && message.hasOwnProperty("outputType")) + if (!$util.isString(message.outputType)) + return "outputType: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MethodOptions.verify(message.options); + if (error) + return "options." + error; } - if (object.plural != null) - message.plural = String(object.plural); - if (object.singular != null) - message.singular = String(object.singular); - if (object.style) { - if (!Array.isArray(object.style)) - throw TypeError(".google.api.ResourceDescriptor.style: array expected"); - message.style = []; - for (var i = 0; i < object.style.length; ++i) - switch (object.style[i]) { - default: - case "STYLE_UNSPECIFIED": - case 0: - message.style[i] = 0; - break; - case "DECLARATIVE_FRIENDLY": - case 1: - message.style[i] = 1; - break; - } + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + if (typeof message.clientStreaming !== "boolean") + return "clientStreaming: boolean expected"; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + if (typeof message.serverStreaming !== "boolean") + return "serverStreaming: boolean expected"; + return null; + }; + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + */ + MethodDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodDescriptorProto) + return object; + var message = new $root.google.protobuf.MethodDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.inputType != null) + message.inputType = String(object.inputType); + if (object.outputType != null) + message.outputType = String(object.outputType); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MethodOptions.fromObject(object.options); } + if (object.clientStreaming != null) + message.clientStreaming = Boolean(object.clientStreaming); + if (object.serverStreaming != null) + message.serverStreaming = Boolean(object.serverStreaming); return message; }; /** - * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.api.ResourceDescriptor + * @memberof google.protobuf.MethodDescriptorProto * @static - * @param {google.api.ResourceDescriptor} message ResourceDescriptor + * @param {google.protobuf.MethodDescriptorProto} message MethodDescriptorProto * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ResourceDescriptor.toObject = function toObject(message, options) { + MethodDescriptorProto.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.pattern = []; - object.style = []; - } if (options.defaults) { - object.type = ""; - object.nameField = ""; - object.history = options.enums === String ? "HISTORY_UNSPECIFIED" : 0; - object.plural = ""; - object.singular = ""; - } - if (message.type != null && message.hasOwnProperty("type")) - object.type = message.type; - if (message.pattern && message.pattern.length) { - object.pattern = []; - for (var j = 0; j < message.pattern.length; ++j) - object.pattern[j] = message.pattern[j]; - } - if (message.nameField != null && message.hasOwnProperty("nameField")) - object.nameField = message.nameField; - if (message.history != null && message.hasOwnProperty("history")) - object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] : message.history; - if (message.plural != null && message.hasOwnProperty("plural")) - object.plural = message.plural; - if (message.singular != null && message.hasOwnProperty("singular")) - object.singular = message.singular; - if (message.style && message.style.length) { - object.style = []; - for (var j = 0; j < message.style.length; ++j) - object.style[j] = options.enums === String ? 
$root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; + object.name = ""; + object.inputType = ""; + object.outputType = ""; + object.options = null; + object.clientStreaming = false; + object.serverStreaming = false; } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.inputType != null && message.hasOwnProperty("inputType")) + object.inputType = message.inputType; + if (message.outputType != null && message.hasOwnProperty("outputType")) + object.outputType = message.outputType; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MethodOptions.toObject(message.options, options); + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + object.clientStreaming = message.clientStreaming; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + object.serverStreaming = message.serverStreaming; return object; }; /** - * Converts this ResourceDescriptor to JSON. - * @function toJSON - * @memberof google.api.ResourceDescriptor + * Converts this MethodDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.MethodDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + MethodDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return MethodDescriptorProto; + })(); + + protobuf.FileOptions = (function() { + + /** + * Properties of a FileOptions. 
+ * @memberof google.protobuf + * @interface IFileOptions + * @property {string|null} [javaPackage] FileOptions javaPackage + * @property {string|null} [javaOuterClassname] FileOptions javaOuterClassname + * @property {boolean|null} [javaMultipleFiles] FileOptions javaMultipleFiles + * @property {boolean|null} [javaGenerateEqualsAndHash] FileOptions javaGenerateEqualsAndHash + * @property {boolean|null} [javaStringCheckUtf8] FileOptions javaStringCheckUtf8 + * @property {google.protobuf.FileOptions.OptimizeMode|null} [optimizeFor] FileOptions optimizeFor + * @property {string|null} [goPackage] FileOptions goPackage + * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices + * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices + * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices + * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices + * @property {boolean|null} [deprecated] FileOptions deprecated + * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas + * @property {string|null} [objcClassPrefix] FileOptions objcClassPrefix + * @property {string|null} [csharpNamespace] FileOptions csharpNamespace + * @property {string|null} [swiftPrefix] FileOptions swiftPrefix + * @property {string|null} [phpClassPrefix] FileOptions phpClassPrefix + * @property {string|null} [phpNamespace] FileOptions phpNamespace + * @property {string|null} [phpMetadataNamespace] FileOptions phpMetadataNamespace + * @property {string|null} [rubyPackage] FileOptions rubyPackage + * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption + * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition + */ + + /** + * Constructs a new FileOptions. + * @memberof google.protobuf + * @classdesc Represents a FileOptions. 
+ * @implements IFileOptions + * @constructor + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + */ + function FileOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.resourceDefinition"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileOptions javaPackage. + * @member {string} javaPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaPackage = ""; + + /** + * FileOptions javaOuterClassname. + * @member {string} javaOuterClassname + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaOuterClassname = ""; + + /** + * FileOptions javaMultipleFiles. + * @member {boolean} javaMultipleFiles + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaMultipleFiles = false; + + /** + * FileOptions javaGenerateEqualsAndHash. + * @member {boolean} javaGenerateEqualsAndHash + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenerateEqualsAndHash = false; + + /** + * FileOptions javaStringCheckUtf8. + * @member {boolean} javaStringCheckUtf8 + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaStringCheckUtf8 = false; + + /** + * FileOptions optimizeFor. + * @member {google.protobuf.FileOptions.OptimizeMode} optimizeFor + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.optimizeFor = 1; + + /** + * FileOptions goPackage. + * @member {string} goPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.goPackage = ""; + + /** + * FileOptions ccGenericServices. 
+ * @member {boolean} ccGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.ccGenericServices = false; + + /** + * FileOptions javaGenericServices. + * @member {boolean} javaGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenericServices = false; + + /** + * FileOptions pyGenericServices. + * @member {boolean} pyGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.pyGenericServices = false; + + /** + * FileOptions phpGenericServices. + * @member {boolean} phpGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpGenericServices = false; + + /** + * FileOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FileOptions * @instance - * @returns {Object.} JSON object */ - ResourceDescriptor.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + FileOptions.prototype.deprecated = false; /** - * History enum. - * @name google.api.ResourceDescriptor.History - * @enum {number} - * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value - * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value - * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value + * FileOptions ccEnableArenas. + * @member {boolean} ccEnableArenas + * @memberof google.protobuf.FileOptions + * @instance */ - ResourceDescriptor.History = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "HISTORY_UNSPECIFIED"] = 0; - values[valuesById[1] = "ORIGINALLY_SINGLE_PATTERN"] = 1; - values[valuesById[2] = "FUTURE_MULTI_PATTERN"] = 2; - return values; - })(); + FileOptions.prototype.ccEnableArenas = true; /** - * Style enum. 
- * @name google.api.ResourceDescriptor.Style - * @enum {number} - * @property {number} STYLE_UNSPECIFIED=0 STYLE_UNSPECIFIED value - * @property {number} DECLARATIVE_FRIENDLY=1 DECLARATIVE_FRIENDLY value + * FileOptions objcClassPrefix. + * @member {string} objcClassPrefix + * @memberof google.protobuf.FileOptions + * @instance */ - ResourceDescriptor.Style = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STYLE_UNSPECIFIED"] = 0; - values[valuesById[1] = "DECLARATIVE_FRIENDLY"] = 1; - return values; - })(); + FileOptions.prototype.objcClassPrefix = ""; - return ResourceDescriptor; - })(); + /** + * FileOptions csharpNamespace. + * @member {string} csharpNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.csharpNamespace = ""; - api.ResourceReference = (function() { + /** + * FileOptions swiftPrefix. + * @member {string} swiftPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.swiftPrefix = ""; /** - * Properties of a ResourceReference. - * @memberof google.api - * @interface IResourceReference - * @property {string|null} [type] ResourceReference type - * @property {string|null} [childType] ResourceReference childType + * FileOptions phpClassPrefix. + * @member {string} phpClassPrefix + * @memberof google.protobuf.FileOptions + * @instance */ + FileOptions.prototype.phpClassPrefix = ""; /** - * Constructs a new ResourceReference. - * @memberof google.api - * @classdesc Represents a ResourceReference. - * @implements IResourceReference - * @constructor - * @param {google.api.IResourceReference=} [properties] Properties to set + * FileOptions phpNamespace. 
+ * @member {string} phpNamespace + * @memberof google.protobuf.FileOptions + * @instance */ - function ResourceReference(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + FileOptions.prototype.phpNamespace = ""; /** - * ResourceReference type. - * @member {string} type - * @memberof google.api.ResourceReference + * FileOptions phpMetadataNamespace. + * @member {string} phpMetadataNamespace + * @memberof google.protobuf.FileOptions * @instance */ - ResourceReference.prototype.type = ""; + FileOptions.prototype.phpMetadataNamespace = ""; /** - * ResourceReference childType. - * @member {string} childType - * @memberof google.api.ResourceReference + * FileOptions rubyPackage. + * @member {string} rubyPackage + * @memberof google.protobuf.FileOptions * @instance */ - ResourceReference.prototype.childType = ""; + FileOptions.prototype.rubyPackage = ""; /** - * Creates a new ResourceReference instance using the specified properties. + * FileOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * FileOptions .google.api.resourceDefinition. + * @member {Array.} .google.api.resourceDefinition + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype[".google.api.resourceDefinition"] = $util.emptyArray; + + /** + * Creates a new FileOptions instance using the specified properties. 
* @function create - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static - * @param {google.api.IResourceReference=} [properties] Properties to set - * @returns {google.api.ResourceReference} ResourceReference instance + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + * @returns {google.protobuf.FileOptions} FileOptions instance */ - ResourceReference.create = function create(properties) { - return new ResourceReference(properties); + FileOptions.create = function create(properties) { + return new FileOptions(properties); }; /** - * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. * @function encode - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static - * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ResourceReference.encode = function encode(message, writer) { + FileOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); + if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); + if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) + 
writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); + if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); + if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); + if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) + writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); + if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) + writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); + if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) + writer.uint32(/* id 17, wireType 0 =*/136).bool(message.javaGenericServices); + if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, "pyGenericServices")) + writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); + if (message.javaGenerateEqualsAndHash != null && Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) + writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); + if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) + writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); + if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) + writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); + if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) + writer.uint32(/* id 36, wireType 2 
=*/290).string(message.objcClassPrefix); + if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) + writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); + if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) + writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); + if (message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) + writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); + if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) + writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); + if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) + writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); + if (message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) + writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); + if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) + writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resourceDefinition"] != null && message[".google.api.resourceDefinition"].length) + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resourceDefinition"][i], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); return writer; }; /** - * Encodes the specified ResourceReference message, length delimited. 
Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. * @function encodeDelimited - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static - * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ResourceReference.encodeDelimited = function encodeDelimited(message, writer) { + FileOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ResourceReference message from the specified reader or buffer. + * Decodes a FileOptions message from the specified reader or buffer. * @function decode - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.api.ResourceReference} ResourceReference + * @returns {google.protobuf.FileOptions} FileOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceReference.decode = function decode(reader, length) { + FileOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.type = reader.string(); + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32(); + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); break; - case 2: - message.childType = reader.string(); + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1053: + if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) + message[".google.api.resourceDefinition"] = []; + 
message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -11038,129 +18808,352 @@ }; /** - * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * Decodes a FileOptions message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.ResourceReference} ResourceReference + * @returns {google.protobuf.FileOptions} FileOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceReference.decodeDelimited = function decodeDelimited(reader) { + FileOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ResourceReference message. + * Verifies a FileOptions message. 
* @function verify - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ResourceReference.verify = function verify(message) { + FileOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.type != null && message.hasOwnProperty("type")) - if (!$util.isString(message.type)) - return "type: string expected"; - if (message.childType != null && message.hasOwnProperty("childType")) - if (!$util.isString(message.childType)) - return "childType: string expected"; + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + if (!$util.isString(message.javaPackage)) + return "javaPackage: string expected"; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + if (!$util.isString(message.javaOuterClassname)) + return "javaOuterClassname: string expected"; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + if (typeof message.javaMultipleFiles !== "boolean") + return "javaMultipleFiles: boolean expected"; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + if (typeof message.javaGenerateEqualsAndHash !== "boolean") + return "javaGenerateEqualsAndHash: boolean expected"; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + if (typeof message.javaStringCheckUtf8 !== "boolean") + return "javaStringCheckUtf8: boolean expected"; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + switch (message.optimizeFor) { + default: + return "optimizeFor: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + if (!$util.isString(message.goPackage)) + 
return "goPackage: string expected"; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + if (typeof message.ccGenericServices !== "boolean") + return "ccGenericServices: boolean expected"; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + if (typeof message.javaGenericServices !== "boolean") + return "javaGenericServices: boolean expected"; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + if (typeof message.pyGenericServices !== "boolean") + return "pyGenericServices: boolean expected"; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + if (typeof message.phpGenericServices !== "boolean") + return "phpGenericServices: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + if (typeof message.ccEnableArenas !== "boolean") + return "ccEnableArenas: boolean expected"; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + if (!$util.isString(message.objcClassPrefix)) + return "objcClassPrefix: string expected"; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + if (!$util.isString(message.csharpNamespace)) + return "csharpNamespace: string expected"; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + if (!$util.isString(message.swiftPrefix)) + return "swiftPrefix: string expected"; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + if (!$util.isString(message.phpClassPrefix)) + return "phpClassPrefix: string expected"; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + if (!$util.isString(message.phpNamespace)) + return "phpNamespace: string 
expected"; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + if (!$util.isString(message.phpMetadataNamespace)) + return "phpMetadataNamespace: string expected"; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + if (!$util.isString(message.rubyPackage)) + return "rubyPackage: string expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.resourceDefinition"] != null && message.hasOwnProperty(".google.api.resourceDefinition")) { + if (!Array.isArray(message[".google.api.resourceDefinition"])) + return ".google.api.resourceDefinition: array expected"; + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resourceDefinition"][i]); + if (error) + return ".google.api.resourceDefinition." + error; + } + } return null; }; /** - * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static * @param {Object.} object Plain object - * @returns {google.api.ResourceReference} ResourceReference + * @returns {google.protobuf.FileOptions} FileOptions */ - ResourceReference.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.ResourceReference) + FileOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileOptions) return object; - var message = new $root.google.api.ResourceReference(); - if (object.type != null) - message.type = String(object.type); - if (object.childType != null) - message.childType = String(object.childType); + var message = new $root.google.protobuf.FileOptions(); + if (object.javaPackage != null) + message.javaPackage = String(object.javaPackage); + if (object.javaOuterClassname != null) + message.javaOuterClassname = String(object.javaOuterClassname); + if (object.javaMultipleFiles != null) + message.javaMultipleFiles = Boolean(object.javaMultipleFiles); + if (object.javaGenerateEqualsAndHash != null) + message.javaGenerateEqualsAndHash = Boolean(object.javaGenerateEqualsAndHash); + if (object.javaStringCheckUtf8 != null) + message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); + switch (object.optimizeFor) { + case "SPEED": + case 1: + message.optimizeFor = 1; + break; + case "CODE_SIZE": + case 2: + message.optimizeFor = 2; + break; + case "LITE_RUNTIME": + case 3: + message.optimizeFor = 3; + break; + } + if (object.goPackage != null) + message.goPackage = String(object.goPackage); + if (object.ccGenericServices != null) + message.ccGenericServices = Boolean(object.ccGenericServices); + if (object.javaGenericServices != null) + message.javaGenericServices = Boolean(object.javaGenericServices); + if (object.pyGenericServices != null) + message.pyGenericServices = Boolean(object.pyGenericServices); + if (object.phpGenericServices != 
null) + message.phpGenericServices = Boolean(object.phpGenericServices); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.ccEnableArenas != null) + message.ccEnableArenas = Boolean(object.ccEnableArenas); + if (object.objcClassPrefix != null) + message.objcClassPrefix = String(object.objcClassPrefix); + if (object.csharpNamespace != null) + message.csharpNamespace = String(object.csharpNamespace); + if (object.swiftPrefix != null) + message.swiftPrefix = String(object.swiftPrefix); + if (object.phpClassPrefix != null) + message.phpClassPrefix = String(object.phpClassPrefix); + if (object.phpNamespace != null) + message.phpNamespace = String(object.phpNamespace); + if (object.phpMetadataNamespace != null) + message.phpMetadataNamespace = String(object.phpMetadataNamespace); + if (object.rubyPackage != null) + message.rubyPackage = String(object.rubyPackage); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.resourceDefinition"]) { + if (!Array.isArray(object[".google.api.resourceDefinition"])) + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: array expected"); + message[".google.api.resourceDefinition"] = []; + for (var i = 0; i < object[".google.api.resourceDefinition"].length; ++i) { + if (typeof object[".google.api.resourceDefinition"][i] !== "object") + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: object expected"); + 
message[".google.api.resourceDefinition"][i] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resourceDefinition"][i]); + } + } return message; }; /** - * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. * @function toObject - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @static - * @param {google.api.ResourceReference} message ResourceReference + * @param {google.protobuf.FileOptions} message FileOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ResourceReference.toObject = function toObject(message, options) { + FileOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.resourceDefinition"] = []; + } if (options.defaults) { - object.type = ""; - object.childType = ""; + object.javaPackage = ""; + object.javaOuterClassname = ""; + object.optimizeFor = options.enums === String ? 
"SPEED" : 1; + object.javaMultipleFiles = false; + object.goPackage = ""; + object.ccGenericServices = false; + object.javaGenericServices = false; + object.pyGenericServices = false; + object.javaGenerateEqualsAndHash = false; + object.deprecated = false; + object.javaStringCheckUtf8 = false; + object.ccEnableArenas = true; + object.objcClassPrefix = ""; + object.csharpNamespace = ""; + object.swiftPrefix = ""; + object.phpClassPrefix = ""; + object.phpNamespace = ""; + object.phpGenericServices = false; + object.phpMetadataNamespace = ""; + object.rubyPackage = ""; + } + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + object.javaPackage = message.javaPackage; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + object.javaOuterClassname = message.javaOuterClassname; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + object.optimizeFor = options.enums === String ? $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + object.javaMultipleFiles = message.javaMultipleFiles; + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + object.goPackage = message.goPackage; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + object.ccGenericServices = message.ccGenericServices; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + object.javaGenericServices = message.javaGenericServices; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + object.pyGenericServices = message.pyGenericServices; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + object.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + if (message.deprecated != null && 
message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + object.javaStringCheckUtf8 = message.javaStringCheckUtf8; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + object.ccEnableArenas = message.ccEnableArenas; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + object.objcClassPrefix = message.objcClassPrefix; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + object.csharpNamespace = message.csharpNamespace; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + object.swiftPrefix = message.swiftPrefix; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + object.phpClassPrefix = message.phpClassPrefix; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + object.phpNamespace = message.phpNamespace; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + object.phpGenericServices = message.phpGenericServices; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + object.phpMetadataNamespace = message.phpMetadataNamespace; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + object.rubyPackage = message.rubyPackage; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length) { + object[".google.api.resourceDefinition"] = []; + for (var j = 0; j < message[".google.api.resourceDefinition"].length; ++j) + 
object[".google.api.resourceDefinition"][j] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resourceDefinition"][j], options); } - if (message.type != null && message.hasOwnProperty("type")) - object.type = message.type; - if (message.childType != null && message.hasOwnProperty("childType")) - object.childType = message.childType; return object; }; /** - * Converts this ResourceReference to JSON. + * Converts this FileOptions to JSON. * @function toJSON - * @memberof google.api.ResourceReference + * @memberof google.protobuf.FileOptions * @instance * @returns {Object.} JSON object */ - ResourceReference.prototype.toJSON = function toJSON() { + FileOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ResourceReference; - })(); - - return api; - })(); - - google.protobuf = (function() { + /** + * OptimizeMode enum. + * @name google.protobuf.FileOptions.OptimizeMode + * @enum {number} + * @property {number} SPEED=1 SPEED value + * @property {number} CODE_SIZE=2 CODE_SIZE value + * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value + */ + FileOptions.OptimizeMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "SPEED"] = 1; + values[valuesById[2] = "CODE_SIZE"] = 2; + values[valuesById[3] = "LITE_RUNTIME"] = 3; + return values; + })(); - /** - * Namespace protobuf. - * @memberof google - * @namespace - */ - var protobuf = {}; + return FileOptions; + })(); - protobuf.FileDescriptorSet = (function() { + protobuf.MessageOptions = (function() { /** - * Properties of a FileDescriptorSet. + * Properties of a MessageOptions. 
* @memberof google.protobuf - * @interface IFileDescriptorSet - * @property {Array.|null} [file] FileDescriptorSet file + * @interface IMessageOptions + * @property {boolean|null} [messageSetWireFormat] MessageOptions messageSetWireFormat + * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor + * @property {boolean|null} [deprecated] MessageOptions deprecated + * @property {boolean|null} [mapEntry] MessageOptions mapEntry + * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption + * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource */ /** - * Constructs a new FileDescriptorSet. + * Constructs a new MessageOptions. * @memberof google.protobuf - * @classdesc Represents a FileDescriptorSet. - * @implements IFileDescriptorSet + * @classdesc Represents a MessageOptions. + * @implements IMessageOptions * @constructor - * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set */ - function FileDescriptorSet(properties) { - this.file = []; + function MessageOptions(properties) { + this.uninterpretedOption = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -11168,78 +19161,143 @@ } /** - * FileDescriptorSet file. - * @member {Array.} file - * @memberof google.protobuf.FileDescriptorSet + * MessageOptions messageSetWireFormat. + * @member {boolean} messageSetWireFormat + * @memberof google.protobuf.MessageOptions * @instance */ - FileDescriptorSet.prototype.file = $util.emptyArray; + MessageOptions.prototype.messageSetWireFormat = false; /** - * Creates a new FileDescriptorSet instance using the specified properties. + * MessageOptions noStandardDescriptorAccessor. 
+ * @member {boolean} noStandardDescriptorAccessor + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.noStandardDescriptorAccessor = false; + + /** + * MessageOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.deprecated = false; + + /** + * MessageOptions mapEntry. + * @member {boolean} mapEntry + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.mapEntry = false; + + /** + * MessageOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * MessageOptions .google.api.resource. + * @member {google.api.IResourceDescriptor|null|undefined} .google.api.resource + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype[".google.api.resource"] = null; + + /** + * Creates a new MessageOptions instance using the specified properties. * @function create - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static - * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet instance + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + * @returns {google.protobuf.MessageOptions} MessageOptions instance */ - FileDescriptorSet.create = function create(properties) { - return new FileDescriptorSet(properties); + MessageOptions.create = function create(properties) { + return new MessageOptions(properties); }; /** - * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * Encodes the specified MessageOptions message. 
Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. * @function encode - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static - * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileDescriptorSet.encode = function encode(message, writer) { + MessageOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.file != null && message.file.length) - for (var i = 0; i < message.file.length; ++i) - $root.google.protobuf.FileDescriptorProto.encode(message.file[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); + if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) + writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, 
".google.api.resource")) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); return writer; }; /** - * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static - * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileDescriptorSet.encodeDelimited = function encodeDelimited(message, writer) { + MessageOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FileDescriptorSet message from the specified reader or buffer. + * Decodes a MessageOptions message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @returns {google.protobuf.MessageOptions} MessageOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorSet.decode = function decode(reader, length) { + MessageOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.file && message.file.length)) - message.file = []; - message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1053: + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -11250,142 +19308,181 @@ }; /** - * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. 
+ * Decodes a MessageOptions message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @returns {google.protobuf.MessageOptions} MessageOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorSet.decodeDelimited = function decodeDelimited(reader) { + MessageOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a FileDescriptorSet message. + * Verifies a MessageOptions message. * @function verify - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - FileDescriptorSet.verify = function verify(message) { + MessageOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.file != null && message.hasOwnProperty("file")) { - if (!Array.isArray(message.file)) - return "file: array expected"; - for (var i = 0; i < message.file.length; ++i) { - var error = $root.google.protobuf.FileDescriptorProto.verify(message.file[i]); + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + if (typeof message.messageSetWireFormat !== "boolean") + return "messageSetWireFormat: boolean expected"; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + if (typeof message.noStandardDescriptorAccessor !== "boolean") + return 
"noStandardDescriptorAccessor: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + if (typeof message.mapEntry !== "boolean") + return "mapEntry: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); if (error) - return "file." + error; + return "uninterpretedOption." + error; } } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resource"]); + if (error) + return ".google.api.resource." + error; + } return null; }; /** - * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static * @param {Object.} object Plain object - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @returns {google.protobuf.MessageOptions} MessageOptions */ - FileDescriptorSet.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileDescriptorSet) + MessageOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MessageOptions) return object; - var message = new $root.google.protobuf.FileDescriptorSet(); - if (object.file) { - if (!Array.isArray(object.file)) - throw TypeError(".google.protobuf.FileDescriptorSet.file: array expected"); - message.file = []; - for (var i = 0; i < object.file.length; ++i) { - if (typeof object.file[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorSet.file: object expected"); - message.file[i] = $root.google.protobuf.FileDescriptorProto.fromObject(object.file[i]); + var message = new $root.google.protobuf.MessageOptions(); + if (object.messageSetWireFormat != null) + message.messageSetWireFormat = Boolean(object.messageSetWireFormat); + if (object.noStandardDescriptorAccessor != null) + message.noStandardDescriptorAccessor = Boolean(object.noStandardDescriptorAccessor); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.mapEntry != null) + message.mapEntry = Boolean(object.mapEntry); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = 
$root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } + if (object[".google.api.resource"] != null) { + if (typeof object[".google.api.resource"] !== "object") + throw TypeError(".google.protobuf.MessageOptions..google.api.resource: object expected"); + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resource"]); + } return message; }; /** - * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @static - * @param {google.protobuf.FileDescriptorSet} message FileDescriptorSet + * @param {google.protobuf.MessageOptions} message MessageOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FileDescriptorSet.toObject = function toObject(message, options) { + MessageOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.arrays || options.defaults) - object.file = []; - if (message.file && message.file.length) { - object.file = []; - for (var j = 0; j < message.file.length; ++j) - object.file[j] = $root.google.protobuf.FileDescriptorProto.toObject(message.file[j], options); + object.uninterpretedOption = []; + if (options.defaults) { + object.messageSetWireFormat = false; + object.noStandardDescriptorAccessor = false; + object.deprecated = false; + object.mapEntry = false; + object[".google.api.resource"] = null; + } + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + object.messageSetWireFormat = message.messageSetWireFormat; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + 
object.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + object.mapEntry = message.mapEntry; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + object[".google.api.resource"] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resource"], options); return object; }; /** - * Converts this FileDescriptorSet to JSON. + * Converts this MessageOptions to JSON. * @function toJSON - * @memberof google.protobuf.FileDescriptorSet + * @memberof google.protobuf.MessageOptions * @instance * @returns {Object.} JSON object */ - FileDescriptorSet.prototype.toJSON = function toJSON() { + MessageOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return FileDescriptorSet; + return MessageOptions; })(); - protobuf.FileDescriptorProto = (function() { + protobuf.FieldOptions = (function() { /** - * Properties of a FileDescriptorProto. + * Properties of a FieldOptions. 
* @memberof google.protobuf - * @interface IFileDescriptorProto - * @property {string|null} [name] FileDescriptorProto name - * @property {string|null} ["package"] FileDescriptorProto package - * @property {Array.|null} [dependency] FileDescriptorProto dependency - * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency - * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency - * @property {Array.|null} [messageType] FileDescriptorProto messageType - * @property {Array.|null} [enumType] FileDescriptorProto enumType - * @property {Array.|null} [service] FileDescriptorProto service - * @property {Array.|null} [extension] FileDescriptorProto extension - * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options - * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo - * @property {string|null} [syntax] FileDescriptorProto syntax + * @interface IFieldOptions + * @property {google.protobuf.FieldOptions.CType|null} [ctype] FieldOptions ctype + * @property {boolean|null} [packed] FieldOptions packed + * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype + * @property {boolean|null} [lazy] FieldOptions lazy + * @property {boolean|null} [deprecated] FieldOptions deprecated + * @property {boolean|null} [weak] FieldOptions weak + * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption + * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior + * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference */ /** - * Constructs a new FileDescriptorProto. + * Constructs a new FieldOptions. * @memberof google.protobuf - * @classdesc Represents a FileDescriptorProto. - * @implements IFileDescriptorProto + * @classdesc Represents a FieldOptions. 
+ * @implements IFieldOptions * @constructor - * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set */ - function FileDescriptorProto(properties) { - this.dependency = []; - this.publicDependency = []; - this.weakDependency = []; - this.messageType = []; - this.enumType = []; - this.service = []; - this.extension = []; + function FieldOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.fieldBehavior"] = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -11393,249 +19490,193 @@ } /** - * FileDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.name = ""; - - /** - * FileDescriptorProto package. - * @member {string} package - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype["package"] = ""; - - /** - * FileDescriptorProto dependency. - * @member {Array.} dependency - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.dependency = $util.emptyArray; - - /** - * FileDescriptorProto publicDependency. - * @member {Array.} publicDependency - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions ctype. + * @member {google.protobuf.FieldOptions.CType} ctype + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.publicDependency = $util.emptyArray; + FieldOptions.prototype.ctype = 0; /** - * FileDescriptorProto weakDependency. - * @member {Array.} weakDependency - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions packed. 
+ * @member {boolean} packed + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + FieldOptions.prototype.packed = false; /** - * FileDescriptorProto messageType. - * @member {Array.} messageType - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions jstype. + * @member {google.protobuf.FieldOptions.JSType} jstype + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.messageType = $util.emptyArray; + FieldOptions.prototype.jstype = 0; /** - * FileDescriptorProto enumType. - * @member {Array.} enumType - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions lazy. + * @member {boolean} lazy + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.enumType = $util.emptyArray; + FieldOptions.prototype.lazy = false; /** - * FileDescriptorProto service. - * @member {Array.} service - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.service = $util.emptyArray; + FieldOptions.prototype.deprecated = false; /** - * FileDescriptorProto extension. - * @member {Array.} extension - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions weak. + * @member {boolean} weak + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.extension = $util.emptyArray; + FieldOptions.prototype.weak = false; /** - * FileDescriptorProto options. - * @member {google.protobuf.IFileOptions|null|undefined} options - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions uninterpretedOption. 
+ * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.options = null; + FieldOptions.prototype.uninterpretedOption = $util.emptyArray; /** - * FileDescriptorProto sourceCodeInfo. - * @member {google.protobuf.ISourceCodeInfo|null|undefined} sourceCodeInfo - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions .google.api.fieldBehavior. + * @member {Array.} .google.api.fieldBehavior + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.sourceCodeInfo = null; + FieldOptions.prototype[".google.api.fieldBehavior"] = $util.emptyArray; /** - * FileDescriptorProto syntax. - * @member {string} syntax - * @memberof google.protobuf.FileDescriptorProto + * FieldOptions .google.api.resourceReference. + * @member {google.api.IResourceReference|null|undefined} .google.api.resourceReference + * @memberof google.protobuf.FieldOptions * @instance */ - FileDescriptorProto.prototype.syntax = ""; + FieldOptions.prototype[".google.api.resourceReference"] = null; /** - * Creates a new FileDescriptorProto instance using the specified properties. + * Creates a new FieldOptions instance using the specified properties. * @function create - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static - * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto instance + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions} FieldOptions instance */ - FileDescriptorProto.create = function create(properties) { - return new FileDescriptorProto(properties); + FieldOptions.create = function create(properties) { + return new FieldOptions(properties); }; /** - * Encodes the specified FileDescriptorProto message. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. * @function encode - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static - * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileDescriptorProto.encode = function encode(message, writer) { + FieldOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); - if (message.dependency != null && message.dependency.length) - for (var i = 0; i < message.dependency.length; ++i) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.dependency[i]); - if (message.messageType != null && message.messageType.length) - for (var i = 0; i < message.messageType.length; ++i) - $root.google.protobuf.DescriptorProto.encode(message.messageType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.enumType != null && message.enumType.length) - for (var i = 0; i < message.enumType.length; ++i) - $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.service != null && message.service.length) - for (var i = 0; i < message.service.length; ++i) - $root.google.protobuf.ServiceDescriptorProto.encode(message.service[i], writer.uint32(/* id 6, 
wireType 2 =*/50).fork()).ldelim(); - if (message.extension != null && message.extension.length) - for (var i = 0; i < message.extension.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) - $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.publicDependency != null && message.publicDependency.length) - for (var i = 0; i < message.publicDependency.length; ++i) - writer.uint32(/* id 10, wireType 0 =*/80).int32(message.publicDependency[i]); - if (message.weakDependency != null && message.weakDependency.length) - for (var i = 0; i < message.weakDependency.length; ++i) - writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); - if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) - writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); + if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); + if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); + if (message.jstype != null && Object.hasOwnProperty.call(message, "jstype")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); 
+ if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { + writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + writer.int32(message[".google.api.fieldBehavior"][i]); + writer.ldelim(); + } + if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) + $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); return writer; }; /** - * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static - * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + FieldOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FileDescriptorProto message from the specified reader or buffer. + * Decodes a FieldOptions message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @returns {google.protobuf.FieldOptions} FieldOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorProto.decode = function decode(reader, length) { + FieldOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.name = reader.string(); + message.ctype = reader.int32(); break; case 2: - message["package"] = reader.string(); + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32(); + break; + case 5: + message.lazy = reader.bool(); break; case 3: - if (!(message.dependency && message.dependency.length)) - message.dependency = []; - message.dependency.push(reader.string()); + message.deprecated = reader.bool(); break; case 10: - if (!(message.publicDependency && message.publicDependency.length)) - message.publicDependency = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.publicDependency.push(reader.int32()); - } else - message.publicDependency.push(reader.int32()); + message.weak = reader.bool(); break; - case 11: - if (!(message.weakDependency && message.weakDependency.length)) - message.weakDependency = []; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + case 1052: + if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) + message[".google.api.fieldBehavior"] = []; if ((tag & 7) === 2) { var end2 = reader.uint32() + reader.pos; while (reader.pos < end2) - message.weakDependency.push(reader.int32()); + message[".google.api.fieldBehavior"].push(reader.int32()); } else - message.weakDependency.push(reader.int32()); - break; - case 4: - if (!(message.messageType && message.messageType.length)) - message.messageType = []; - message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.enumType && 
message.enumType.length)) - message.enumType = []; - message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - case 6: - if (!(message.service && message.service.length)) - message.service = []; - message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); - break; - case 7: - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - case 8: - message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); - break; - case 9: - message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); + message[".google.api.fieldBehavior"].push(reader.int32()); break; - case 12: - message.syntax = reader.string(); + case 1055: + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); break; default: reader.skipType(tag & 7); @@ -11646,329 +19687,317 @@ }; /** - * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @returns {google.protobuf.FieldOptions} FieldOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + FieldOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a FileDescriptorProto message. + * Verifies a FieldOptions message. * @function verify - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - FileDescriptorProto.verify = function verify(message) { + FieldOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message["package"] != null && message.hasOwnProperty("package")) - if (!$util.isString(message["package"])) - return "package: string expected"; - if (message.dependency != null && message.hasOwnProperty("dependency")) { - if (!Array.isArray(message.dependency)) - return "dependency: array expected"; - for (var i = 0; i < message.dependency.length; ++i) - if (!$util.isString(message.dependency[i])) - return "dependency: string[] expected"; - } - if (message.publicDependency != null && message.hasOwnProperty("publicDependency")) { - if (!Array.isArray(message.publicDependency)) - return 
"publicDependency: array expected"; - for (var i = 0; i < message.publicDependency.length; ++i) - if (!$util.isInteger(message.publicDependency[i])) - return "publicDependency: integer[] expected"; - } - if (message.weakDependency != null && message.hasOwnProperty("weakDependency")) { - if (!Array.isArray(message.weakDependency)) - return "weakDependency: array expected"; - for (var i = 0; i < message.weakDependency.length; ++i) - if (!$util.isInteger(message.weakDependency[i])) - return "weakDependency: integer[] expected"; - } - if (message.messageType != null && message.hasOwnProperty("messageType")) { - if (!Array.isArray(message.messageType)) - return "messageType: array expected"; - for (var i = 0; i < message.messageType.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.verify(message.messageType[i]); - if (error) - return "messageType." + error; - } - } - if (message.enumType != null && message.hasOwnProperty("enumType")) { - if (!Array.isArray(message.enumType)) - return "enumType: array expected"; - for (var i = 0; i < message.enumType.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); - if (error) - return "enumType." + error; + if (message.ctype != null && message.hasOwnProperty("ctype")) + switch (message.ctype) { + default: + return "ctype: enum value expected"; + case 0: + case 1: + case 2: + break; } - } - if (message.service != null && message.hasOwnProperty("service")) { - if (!Array.isArray(message.service)) - return "service: array expected"; - for (var i = 0; i < message.service.length; ++i) { - var error = $root.google.protobuf.ServiceDescriptorProto.verify(message.service[i]); - if (error) - return "service." 
+ error; + if (message.packed != null && message.hasOwnProperty("packed")) + if (typeof message.packed !== "boolean") + return "packed: boolean expected"; + if (message.jstype != null && message.hasOwnProperty("jstype")) + switch (message.jstype) { + default: + return "jstype: enum value expected"; + case 0: + case 1: + case 2: + break; } - } - if (message.extension != null && message.hasOwnProperty("extension")) { - if (!Array.isArray(message.extension)) - return "extension: array expected"; - for (var i = 0; i < message.extension.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (message.lazy != null && message.hasOwnProperty("lazy")) + if (typeof message.lazy !== "boolean") + return "lazy: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.weak != null && message.hasOwnProperty("weak")) + if (typeof message.weak !== "boolean") + return "weak: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); if (error) - return "extension." + error; + return "uninterpretedOption." + error; } } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.FileOptions.verify(message.options); - if (error) - return "options." 
+ error; + if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { + if (!Array.isArray(message[".google.api.fieldBehavior"])) + return ".google.api.fieldBehavior: array expected"; + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + switch (message[".google.api.fieldBehavior"][i]) { + default: + return ".google.api.fieldBehavior: enum value[] expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + break; + } } - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) { - var error = $root.google.protobuf.SourceCodeInfo.verify(message.sourceCodeInfo); + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) { + var error = $root.google.api.ResourceReference.verify(message[".google.api.resourceReference"]); if (error) - return "sourceCodeInfo." + error; + return ".google.api.resourceReference." + error; } - if (message.syntax != null && message.hasOwnProperty("syntax")) - if (!$util.isString(message.syntax)) - return "syntax: string expected"; return null; }; /** - * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static * @param {Object.} object Plain object - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @returns {google.protobuf.FieldOptions} FieldOptions */ - FileDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileDescriptorProto) + FieldOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions) return object; - var message = new $root.google.protobuf.FileDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object["package"] != null) - message["package"] = String(object["package"]); - if (object.dependency) { - if (!Array.isArray(object.dependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.dependency: array expected"); - message.dependency = []; - for (var i = 0; i < object.dependency.length; ++i) - message.dependency[i] = String(object.dependency[i]); - } - if (object.publicDependency) { - if (!Array.isArray(object.publicDependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.publicDependency: array expected"); - message.publicDependency = []; - for (var i = 0; i < object.publicDependency.length; ++i) - message.publicDependency[i] = object.publicDependency[i] | 0; - } - if (object.weakDependency) { - if (!Array.isArray(object.weakDependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.weakDependency: array expected"); - message.weakDependency = []; - for (var i = 0; i < object.weakDependency.length; ++i) - message.weakDependency[i] = object.weakDependency[i] | 0; - } - if (object.messageType) { - if (!Array.isArray(object.messageType)) - throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); - message.messageType = []; - for (var i = 0; i < object.messageType.length; ++i) { - if (typeof object.messageType[i] !== 
"object") - throw TypeError(".google.protobuf.FileDescriptorProto.messageType: object expected"); - message.messageType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.messageType[i]); - } - } - if (object.enumType) { - if (!Array.isArray(object.enumType)) - throw TypeError(".google.protobuf.FileDescriptorProto.enumType: array expected"); - message.enumType = []; - for (var i = 0; i < object.enumType.length; ++i) { - if (typeof object.enumType[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.enumType: object expected"); - message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); - } + var message = new $root.google.protobuf.FieldOptions(); + switch (object.ctype) { + case "STRING": + case 0: + message.ctype = 0; + break; + case "CORD": + case 1: + message.ctype = 1; + break; + case "STRING_PIECE": + case 2: + message.ctype = 2; + break; } - if (object.service) { - if (!Array.isArray(object.service)) - throw TypeError(".google.protobuf.FileDescriptorProto.service: array expected"); - message.service = []; - for (var i = 0; i < object.service.length; ++i) { - if (typeof object.service[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.service: object expected"); - message.service[i] = $root.google.protobuf.ServiceDescriptorProto.fromObject(object.service[i]); - } + if (object.packed != null) + message.packed = Boolean(object.packed); + switch (object.jstype) { + case "JS_NORMAL": + case 0: + message.jstype = 0; + break; + case "JS_STRING": + case 1: + message.jstype = 1; + break; + case "JS_NUMBER": + case 2: + message.jstype = 2; + break; } - if (object.extension) { - if (!Array.isArray(object.extension)) - throw TypeError(".google.protobuf.FileDescriptorProto.extension: array expected"); - message.extension = []; - for (var i = 0; i < object.extension.length; ++i) { - if (typeof object.extension[i] !== "object") - throw 
TypeError(".google.protobuf.FileDescriptorProto.extension: object expected"); - message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + if (object.lazy != null) + message.lazy = Boolean(object.lazy); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.weak != null) + message.weak = Boolean(object.weak); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.FileOptions.fromObject(object.options); + if (object[".google.api.fieldBehavior"]) { + if (!Array.isArray(object[".google.api.fieldBehavior"])) + throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); + message[".google.api.fieldBehavior"] = []; + for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) + switch (object[".google.api.fieldBehavior"][i]) { + default: + case "FIELD_BEHAVIOR_UNSPECIFIED": + case 0: + message[".google.api.fieldBehavior"][i] = 0; + break; + case "OPTIONAL": + case 1: + message[".google.api.fieldBehavior"][i] = 1; + break; + case "REQUIRED": + case 2: + message[".google.api.fieldBehavior"][i] = 2; + break; + case "OUTPUT_ONLY": + case 3: + message[".google.api.fieldBehavior"][i] = 3; + break; + case "INPUT_ONLY": + case 4: + message[".google.api.fieldBehavior"][i] = 4; + 
break; + case "IMMUTABLE": + case 5: + message[".google.api.fieldBehavior"][i] = 5; + break; + case "UNORDERED_LIST": + case 6: + message[".google.api.fieldBehavior"][i] = 6; + break; + } } - if (object.sourceCodeInfo != null) { - if (typeof object.sourceCodeInfo !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.sourceCodeInfo: object expected"); - message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.fromObject(object.sourceCodeInfo); + if (object[".google.api.resourceReference"] != null) { + if (typeof object[".google.api.resourceReference"] !== "object") + throw TypeError(".google.protobuf.FieldOptions..google.api.resourceReference: object expected"); + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.fromObject(object[".google.api.resourceReference"]); } - if (object.syntax != null) - message.syntax = String(object.syntax); return message; }; /** - * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @static - * @param {google.protobuf.FileDescriptorProto} message FileDescriptorProto + * @param {google.protobuf.FieldOptions} message FieldOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FileDescriptorProto.toObject = function toObject(message, options) { + FieldOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.arrays || options.defaults) { - object.dependency = []; - object.messageType = []; - object.enumType = []; - object.service = []; - object.extension = []; - object.publicDependency = []; - object.weakDependency = []; + object.uninterpretedOption = []; + object[".google.api.fieldBehavior"] = []; } if (options.defaults) { - object.name = ""; - object["package"] = ""; - object.options = null; - object.sourceCodeInfo = null; - object.syntax = ""; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message["package"] != null && message.hasOwnProperty("package")) - object["package"] = message["package"]; - if (message.dependency && message.dependency.length) { - object.dependency = []; - for (var j = 0; j < message.dependency.length; ++j) - object.dependency[j] = message.dependency[j]; - } - if (message.messageType && message.messageType.length) { - object.messageType = []; - for (var j = 0; j < message.messageType.length; ++j) - object.messageType[j] = $root.google.protobuf.DescriptorProto.toObject(message.messageType[j], options); - } - if (message.enumType && message.enumType.length) { - object.enumType = []; - for (var j = 0; j < message.enumType.length; ++j) - object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); - } - if (message.service && message.service.length) { - object.service = []; - for (var j = 0; j < 
message.service.length; ++j) - object.service[j] = $root.google.protobuf.ServiceDescriptorProto.toObject(message.service[j], options); - } - if (message.extension && message.extension.length) { - object.extension = []; - for (var j = 0; j < message.extension.length; ++j) - object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + object.ctype = options.enums === String ? "STRING" : 0; + object.packed = false; + object.deprecated = false; + object.lazy = false; + object.jstype = options.enums === String ? "JS_NORMAL" : 0; + object.weak = false; + object[".google.api.resourceReference"] = null; } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.FileOptions.toObject(message.options, options); - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) - object.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.toObject(message.sourceCodeInfo, options); - if (message.publicDependency && message.publicDependency.length) { - object.publicDependency = []; - for (var j = 0; j < message.publicDependency.length; ++j) - object.publicDependency[j] = message.publicDependency[j]; + if (message.ctype != null && message.hasOwnProperty("ctype")) + object.ctype = options.enums === String ? $root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; + if (message.packed != null && message.hasOwnProperty("packed")) + object.packed = message.packed; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.lazy != null && message.hasOwnProperty("lazy")) + object.lazy = message.lazy; + if (message.jstype != null && message.hasOwnProperty("jstype")) + object.jstype = options.enums === String ? 
$root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; + if (message.weak != null && message.hasOwnProperty("weak")) + object.weak = message.weak; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); } - if (message.weakDependency && message.weakDependency.length) { - object.weakDependency = []; - for (var j = 0; j < message.weakDependency.length; ++j) - object.weakDependency[j] = message.weakDependency[j]; + if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { + object[".google.api.fieldBehavior"] = []; + for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) + object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; } - if (message.syntax != null && message.hasOwnProperty("syntax")) - object.syntax = message.syntax; + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); return object; }; /** - * Converts this FileDescriptorProto to JSON. + * Converts this FieldOptions to JSON. * @function toJSON - * @memberof google.protobuf.FileDescriptorProto + * @memberof google.protobuf.FieldOptions * @instance * @returns {Object.} JSON object */ - FileDescriptorProto.prototype.toJSON = function toJSON() { + FieldOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return FileDescriptorProto; + /** + * CType enum. 
+ * @name google.protobuf.FieldOptions.CType + * @enum {number} + * @property {number} STRING=0 STRING value + * @property {number} CORD=1 CORD value + * @property {number} STRING_PIECE=2 STRING_PIECE value + */ + FieldOptions.CType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STRING"] = 0; + values[valuesById[1] = "CORD"] = 1; + values[valuesById[2] = "STRING_PIECE"] = 2; + return values; + })(); + + /** + * JSType enum. + * @name google.protobuf.FieldOptions.JSType + * @enum {number} + * @property {number} JS_NORMAL=0 JS_NORMAL value + * @property {number} JS_STRING=1 JS_STRING value + * @property {number} JS_NUMBER=2 JS_NUMBER value + */ + FieldOptions.JSType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "JS_NORMAL"] = 0; + values[valuesById[1] = "JS_STRING"] = 1; + values[valuesById[2] = "JS_NUMBER"] = 2; + return values; + })(); + + return FieldOptions; })(); - protobuf.DescriptorProto = (function() { + protobuf.OneofOptions = (function() { /** - * Properties of a DescriptorProto. + * Properties of an OneofOptions. 
* @memberof google.protobuf - * @interface IDescriptorProto - * @property {string|null} [name] DescriptorProto name - * @property {Array.|null} [field] DescriptorProto field - * @property {Array.|null} [extension] DescriptorProto extension - * @property {Array.|null} [nestedType] DescriptorProto nestedType - * @property {Array.|null} [enumType] DescriptorProto enumType - * @property {Array.|null} [extensionRange] DescriptorProto extensionRange - * @property {Array.|null} [oneofDecl] DescriptorProto oneofDecl - * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options - * @property {Array.|null} [reservedRange] DescriptorProto reservedRange - * @property {Array.|null} [reservedName] DescriptorProto reservedName + * @interface IOneofOptions + * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption */ /** - * Constructs a new DescriptorProto. + * Constructs a new OneofOptions. * @memberof google.protobuf - * @classdesc Represents a DescriptorProto. - * @implements IDescriptorProto + * @classdesc Represents an OneofOptions. + * @implements IOneofOptions * @constructor - * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set */ - function DescriptorProto(properties) { - this.field = []; - this.extension = []; - this.nestedType = []; - this.enumType = []; - this.extensionRange = []; - this.oneofDecl = []; - this.reservedRange = []; - this.reservedName = []; + function OneofOptions(properties) { + this.uninterpretedOption = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -11976,216 +20005,78 @@ } /** - * DescriptorProto name. - * @member {string} name - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.name = ""; - - /** - * DescriptorProto field. 
- * @member {Array.} field - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.field = $util.emptyArray; - - /** - * DescriptorProto extension. - * @member {Array.} extension - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.extension = $util.emptyArray; - - /** - * DescriptorProto nestedType. - * @member {Array.} nestedType - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.nestedType = $util.emptyArray; - - /** - * DescriptorProto enumType. - * @member {Array.} enumType - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.enumType = $util.emptyArray; - - /** - * DescriptorProto extensionRange. - * @member {Array.} extensionRange - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.extensionRange = $util.emptyArray; - - /** - * DescriptorProto oneofDecl. - * @member {Array.} oneofDecl - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.oneofDecl = $util.emptyArray; - - /** - * DescriptorProto options. - * @member {google.protobuf.IMessageOptions|null|undefined} options - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.options = null; - - /** - * DescriptorProto reservedRange. - * @member {Array.} reservedRange - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.reservedRange = $util.emptyArray; - - /** - * DescriptorProto reservedName. - * @member {Array.} reservedName - * @memberof google.protobuf.DescriptorProto + * OneofOptions uninterpretedOption. 
+ * @member {Array.} uninterpretedOption + * @memberof google.protobuf.OneofOptions * @instance */ - DescriptorProto.prototype.reservedName = $util.emptyArray; + OneofOptions.prototype.uninterpretedOption = $util.emptyArray; /** - * Creates a new DescriptorProto instance using the specified properties. + * Creates a new OneofOptions instance using the specified properties. * @function create - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static - * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto} DescriptorProto instance + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + * @returns {google.protobuf.OneofOptions} OneofOptions instance */ - DescriptorProto.create = function create(properties) { - return new DescriptorProto(properties); + OneofOptions.create = function create(properties) { + return new OneofOptions(properties); }; /** - * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static - * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - DescriptorProto.encode = function encode(message, writer) { + OneofOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.field != null && message.field.length) - for (var i = 0; i < message.field.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.field[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.nestedType != null && message.nestedType.length) - for (var i = 0; i < message.nestedType.length; ++i) - $root.google.protobuf.DescriptorProto.encode(message.nestedType[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.enumType != null && message.enumType.length) - for (var i = 0; i < message.enumType.length; ++i) - $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.extensionRange != null && message.extensionRange.length) - for (var i = 0; i < message.extensionRange.length; ++i) - $root.google.protobuf.DescriptorProto.ExtensionRange.encode(message.extensionRange[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.extension != null && message.extension.length) - for (var i = 0; i < message.extension.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.oneofDecl != null && message.oneofDecl.length) - for (var i = 0; i < message.oneofDecl.length; ++i) - $root.google.protobuf.OneofDescriptorProto.encode(message.oneofDecl[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.reservedRange != null && message.reservedRange.length) - for (var i = 0; i < message.reservedRange.length; ++i) - $root.google.protobuf.DescriptorProto.ReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.reservedName != null && message.reservedName.length) - for (var i = 0; i < message.reservedName.length; ++i) - writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); return writer; }; /** - * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static - * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - DescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a DescriptorProto message from the specified reader or buffer. + * Decodes an OneofOptions message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @returns {google.protobuf.OneofOptions} OneofOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DescriptorProto.decode = function decode(reader, length) { + OneofOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - if (!(message.field && message.field.length)) - message.field = []; - message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - case 6: - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - if (!(message.nestedType && message.nestedType.length)) - message.nestedType = []; - message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - case 4: - if (!(message.enumType && message.enumType.length)) - message.enumType = []; - message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.extensionRange && message.extensionRange.length)) - message.extensionRange = []; - message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); - break; - case 8: - if (!(message.oneofDecl && message.oneofDecl.length)) - message.oneofDecl = []; - message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); - break; - case 7: - message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); - break; - case 9: - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); - break; - case 10: - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - message.reservedName.push(reader.string()); + case 999: + if (!(message.uninterpretedOption && 
message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -12196,765 +20087,612 @@ }; /** - * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @returns {google.protobuf.OneofOptions} OneofOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DescriptorProto.decodeDelimited = function decodeDelimited(reader) { + OneofOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a DescriptorProto message. + * Verifies an OneofOptions message. 
* @function verify - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - DescriptorProto.verify = function verify(message) { + OneofOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.field != null && message.hasOwnProperty("field")) { - if (!Array.isArray(message.field)) - return "field: array expected"; - for (var i = 0; i < message.field.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.field[i]); - if (error) - return "field." + error; - } - } - if (message.extension != null && message.hasOwnProperty("extension")) { - if (!Array.isArray(message.extension)) - return "extension: array expected"; - for (var i = 0; i < message.extension.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); - if (error) - return "extension." + error; - } - } - if (message.nestedType != null && message.hasOwnProperty("nestedType")) { - if (!Array.isArray(message.nestedType)) - return "nestedType: array expected"; - for (var i = 0; i < message.nestedType.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.verify(message.nestedType[i]); - if (error) - return "nestedType." + error; - } - } - if (message.enumType != null && message.hasOwnProperty("enumType")) { - if (!Array.isArray(message.enumType)) - return "enumType: array expected"; - for (var i = 0; i < message.enumType.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); - if (error) - return "enumType." 
+ error; - } - } - if (message.extensionRange != null && message.hasOwnProperty("extensionRange")) { - if (!Array.isArray(message.extensionRange)) - return "extensionRange: array expected"; - for (var i = 0; i < message.extensionRange.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.ExtensionRange.verify(message.extensionRange[i]); - if (error) - return "extensionRange." + error; - } - } - if (message.oneofDecl != null && message.hasOwnProperty("oneofDecl")) { - if (!Array.isArray(message.oneofDecl)) - return "oneofDecl: array expected"; - for (var i = 0; i < message.oneofDecl.length; ++i) { - var error = $root.google.protobuf.OneofDescriptorProto.verify(message.oneofDecl[i]); - if (error) - return "oneofDecl." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.MessageOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { - if (!Array.isArray(message.reservedRange)) - return "reservedRange: array expected"; - for (var i = 0; i < message.reservedRange.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.ReservedRange.verify(message.reservedRange[i]); + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); if (error) - return "reservedRange." + error; + return "uninterpretedOption." 
+ error; } } - if (message.reservedName != null && message.hasOwnProperty("reservedName")) { - if (!Array.isArray(message.reservedName)) - return "reservedName: array expected"; - for (var i = 0; i < message.reservedName.length; ++i) - if (!$util.isString(message.reservedName[i])) - return "reservedName: string[] expected"; - } return null; }; /** - * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @returns {google.protobuf.OneofOptions} OneofOptions */ - DescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto) + OneofOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofOptions) return object; - var message = new $root.google.protobuf.DescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.field) { - if (!Array.isArray(object.field)) - throw TypeError(".google.protobuf.DescriptorProto.field: array expected"); - message.field = []; - for (var i = 0; i < object.field.length; ++i) { - if (typeof object.field[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.field: object expected"); - message.field[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.field[i]); - } - } - if (object.extension) { - if (!Array.isArray(object.extension)) - throw TypeError(".google.protobuf.DescriptorProto.extension: array expected"); - message.extension = []; - for (var i = 0; i < object.extension.length; ++i) { - if (typeof object.extension[i] !== "object") - throw 
TypeError(".google.protobuf.DescriptorProto.extension: object expected"); - message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); - } - } - if (object.nestedType) { - if (!Array.isArray(object.nestedType)) - throw TypeError(".google.protobuf.DescriptorProto.nestedType: array expected"); - message.nestedType = []; - for (var i = 0; i < object.nestedType.length; ++i) { - if (typeof object.nestedType[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.nestedType: object expected"); - message.nestedType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.nestedType[i]); - } - } - if (object.enumType) { - if (!Array.isArray(object.enumType)) - throw TypeError(".google.protobuf.DescriptorProto.enumType: array expected"); - message.enumType = []; - for (var i = 0; i < object.enumType.length; ++i) { - if (typeof object.enumType[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.enumType: object expected"); - message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); - } - } - if (object.extensionRange) { - if (!Array.isArray(object.extensionRange)) - throw TypeError(".google.protobuf.DescriptorProto.extensionRange: array expected"); - message.extensionRange = []; - for (var i = 0; i < object.extensionRange.length; ++i) { - if (typeof object.extensionRange[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.extensionRange: object expected"); - message.extensionRange[i] = $root.google.protobuf.DescriptorProto.ExtensionRange.fromObject(object.extensionRange[i]); - } - } - if (object.oneofDecl) { - if (!Array.isArray(object.oneofDecl)) - throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: array expected"); - message.oneofDecl = []; - for (var i = 0; i < object.oneofDecl.length; ++i) { - if (typeof object.oneofDecl[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: object expected"); - 
message.oneofDecl[i] = $root.google.protobuf.OneofDescriptorProto.fromObject(object.oneofDecl[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.DescriptorProto.options: object expected"); - message.options = $root.google.protobuf.MessageOptions.fromObject(object.options); - } - if (object.reservedRange) { - if (!Array.isArray(object.reservedRange)) - throw TypeError(".google.protobuf.DescriptorProto.reservedRange: array expected"); - message.reservedRange = []; - for (var i = 0; i < object.reservedRange.length; ++i) { - if (typeof object.reservedRange[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.reservedRange: object expected"); - message.reservedRange[i] = $root.google.protobuf.DescriptorProto.ReservedRange.fromObject(object.reservedRange[i]); + var message = new $root.google.protobuf.OneofOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } - if (object.reservedName) { - if (!Array.isArray(object.reservedName)) - throw TypeError(".google.protobuf.DescriptorProto.reservedName: array expected"); - message.reservedName = []; - for (var i = 0; i < object.reservedName.length; ++i) - message.reservedName[i] = String(object.reservedName[i]); - } return message; }; /** - * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @static - * @param {google.protobuf.DescriptorProto} message DescriptorProto + * @param {google.protobuf.OneofOptions} message OneofOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - DescriptorProto.toObject = function toObject(message, options) { + OneofOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.field = []; - object.nestedType = []; - object.enumType = []; - object.extensionRange = []; - object.extension = []; - object.oneofDecl = []; - object.reservedRange = []; - object.reservedName = []; - } - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.field && message.field.length) { - object.field = []; - for (var j = 0; j < message.field.length; ++j) - object.field[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.field[j], options); - } - if (message.nestedType && message.nestedType.length) { - object.nestedType = []; - for (var j = 0; j < message.nestedType.length; ++j) - object.nestedType[j] = $root.google.protobuf.DescriptorProto.toObject(message.nestedType[j], options); - } - if (message.enumType && message.enumType.length) { - object.enumType = []; - for (var j = 0; j < message.enumType.length; ++j) - object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); - } - if (message.extensionRange && message.extensionRange.length) { - object.extensionRange = []; - for (var j = 0; j < message.extensionRange.length; ++j) - object.extensionRange[j] = $root.google.protobuf.DescriptorProto.ExtensionRange.toObject(message.extensionRange[j], options); - } - if (message.extension && message.extension.length) 
{ - object.extension = []; - for (var j = 0; j < message.extension.length; ++j) - object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.MessageOptions.toObject(message.options, options); - if (message.oneofDecl && message.oneofDecl.length) { - object.oneofDecl = []; - for (var j = 0; j < message.oneofDecl.length; ++j) - object.oneofDecl[j] = $root.google.protobuf.OneofDescriptorProto.toObject(message.oneofDecl[j], options); - } - if (message.reservedRange && message.reservedRange.length) { - object.reservedRange = []; - for (var j = 0; j < message.reservedRange.length; ++j) - object.reservedRange[j] = $root.google.protobuf.DescriptorProto.ReservedRange.toObject(message.reservedRange[j], options); - } - if (message.reservedName && message.reservedName.length) { - object.reservedName = []; - for (var j = 0; j < message.reservedName.length; ++j) - object.reservedName[j] = message.reservedName[j]; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); } return object; }; /** - * Converts this DescriptorProto to JSON. + * Converts this OneofOptions to JSON. * @function toJSON - * @memberof google.protobuf.DescriptorProto + * @memberof google.protobuf.OneofOptions * @instance * @returns {Object.} JSON object */ - DescriptorProto.prototype.toJSON = function toJSON() { + OneofOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - DescriptorProto.ExtensionRange = (function() { - - /** - * Properties of an ExtensionRange. 
- * @memberof google.protobuf.DescriptorProto - * @interface IExtensionRange - * @property {number|null} [start] ExtensionRange start - * @property {number|null} [end] ExtensionRange end - * @property {google.protobuf.IExtensionRangeOptions|null} [options] ExtensionRange options - */ + return OneofOptions; + })(); - /** - * Constructs a new ExtensionRange. - * @memberof google.protobuf.DescriptorProto - * @classdesc Represents an ExtensionRange. - * @implements IExtensionRange - * @constructor - * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set - */ - function ExtensionRange(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + protobuf.EnumOptions = (function() { - /** - * ExtensionRange start. - * @member {number} start - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.start = 0; + /** + * Properties of an EnumOptions. + * @memberof google.protobuf + * @interface IEnumOptions + * @property {boolean|null} [allowAlias] EnumOptions allowAlias + * @property {boolean|null} [deprecated] EnumOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption + */ - /** - * ExtensionRange end. - * @member {number} end - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.end = 0; + /** + * Constructs a new EnumOptions. + * @memberof google.protobuf + * @classdesc Represents an EnumOptions. 
+ * @implements IEnumOptions + * @constructor + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + */ + function EnumOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * ExtensionRange options. - * @member {google.protobuf.IExtensionRangeOptions|null|undefined} options - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.options = null; + /** + * EnumOptions allowAlias. + * @member {boolean} allowAlias + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.allowAlias = false; - /** - * Creates a new ExtensionRange instance using the specified properties. - * @function create - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange instance - */ - ExtensionRange.create = function create(properties) { - return new ExtensionRange(properties); - }; + /** + * EnumOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.deprecated = false; - /** - * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRange.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; + /** + * EnumOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.uninterpretedOption = $util.emptyArray; - /** - * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRange.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * Creates a new EnumOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumOptions} EnumOptions instance + */ + EnumOptions.create = function create(properties) { + return new EnumOptions(properties); + }; - /** - * Decodes an ExtensionRange message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRange.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.start = reader.int32(); - break; - case 2: - message.end = reader.int32(); - break; - case 3: - message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; - /** - * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRange.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; - /** - * Verifies an ExtensionRange message. - * @function verify - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ExtensionRange.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.ExtensionRangeOptions.verify(message.options); - if (error) - return "options." + error; + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; } - return null; - }; + } + return message; + }; - /** - * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - */ - ExtensionRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto.ExtensionRange) - return object; - var message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected"); - message.options = $root.google.protobuf.ExtensionRangeOptions.fromObject(object.options); - } - return message; - }; + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.ExtensionRange} message ExtensionRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ExtensionRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; - object.options = null; + /** + * Verifies an EnumOptions message. 
+ * @function verify + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + if (typeof message.allowAlias !== "boolean") + return "allowAlias: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.ExtensionRangeOptions.toObject(message.options, options); - return object; - }; + } + return null; + }; - /** - * Converts this ExtensionRange to JSON. - * @function toJSON - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - * @returns {Object.} JSON object - */ - ExtensionRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumOptions} EnumOptions + */ + EnumOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumOptions) + return object; + var message = new $root.google.protobuf.EnumOptions(); + if (object.allowAlias != null) + message.allowAlias = Boolean(object.allowAlias); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; - return ExtensionRange; - })(); + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.EnumOptions} message EnumOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.allowAlias = false; + object.deprecated = false; + } + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + object.allowAlias = message.allowAlias; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; - DescriptorProto.ReservedRange = (function() { + /** + * Converts this EnumOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumOptions + * @instance + * @returns {Object.} JSON object + */ + EnumOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Properties of a ReservedRange. - * @memberof google.protobuf.DescriptorProto - * @interface IReservedRange - * @property {number|null} [start] ReservedRange start - * @property {number|null} [end] ReservedRange end - */ + return EnumOptions; + })(); - /** - * Constructs a new ReservedRange. - * @memberof google.protobuf.DescriptorProto - * @classdesc Represents a ReservedRange. 
- * @implements IReservedRange - * @constructor - * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set - */ - function ReservedRange(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + protobuf.EnumValueOptions = (function() { - /** - * ReservedRange start. - * @member {number} start - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - */ - ReservedRange.prototype.start = 0; + /** + * Properties of an EnumValueOptions. + * @memberof google.protobuf + * @interface IEnumValueOptions + * @property {boolean|null} [deprecated] EnumValueOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption + */ - /** - * ReservedRange end. - * @member {number} end - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - */ - ReservedRange.prototype.end = 0; + /** + * Constructs a new EnumValueOptions. + * @memberof google.protobuf + * @classdesc Represents an EnumValueOptions. + * @implements IEnumValueOptions + * @constructor + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + */ + function EnumValueOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Creates a new ReservedRange instance using the specified properties. - * @function create - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange instance - */ - ReservedRange.create = function create(properties) { - return new ReservedRange(properties); - }; + /** + * EnumValueOptions deprecated. 
+ * @member {boolean} deprecated + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.deprecated = false; - /** - * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. - * @function encode - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReservedRange.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - return writer; - }; + /** + * EnumValueOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; - /** - * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReservedRange.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * Creates a new EnumValueOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance + */ + EnumValueOptions.create = function create(properties) { + return new EnumValueOptions(properties); + }; - /** - * Decodes a ReservedRange message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReservedRange.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.start = reader.int32(); - break; - case 2: - message.end = reader.int32(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; - /** - * Decodes a ReservedRange message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReservedRange.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; - /** - * Verifies a ReservedRange message. - * @function verify - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReservedRange.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - return null; - }; + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; - /** - * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - */ - ReservedRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto.ReservedRange) - return object; - var message = new $root.google.protobuf.DescriptorProto.ReservedRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - return message; - }; + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * Creates a plain object from a ReservedRange message. 
Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.ReservedRange} message ReservedRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReservedRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; + /** + * Verifies an EnumValueOptions message. + * @function verify + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumValueOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; + } + return null; + }; + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + */ + EnumValueOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueOptions) return object; - }; + var message = new $root.google.protobuf.EnumValueOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; - /** - * Converts this ReservedRange to JSON. - * @function toJSON - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - * @returns {Object.} JSON object - */ - ReservedRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.EnumValueOptions} message EnumValueOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumValueOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) + object.deprecated = false; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; - return ReservedRange; - })(); + /** + * Converts this EnumValueOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumValueOptions + * @instance + * @returns {Object.} JSON object + */ + EnumValueOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - return DescriptorProto; + return EnumValueOptions; })(); - protobuf.ExtensionRangeOptions = (function() { + protobuf.ServiceOptions = (function() { /** - * Properties of an ExtensionRangeOptions. + * Properties of a ServiceOptions. 
* @memberof google.protobuf - * @interface IExtensionRangeOptions - * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption + * @interface IServiceOptions + * @property {boolean|null} [deprecated] ServiceOptions deprecated + * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption + * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost + * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes */ /** - * Constructs a new ExtensionRangeOptions. + * Constructs a new ServiceOptions. * @memberof google.protobuf - * @classdesc Represents an ExtensionRangeOptions. - * @implements IExtensionRangeOptions + * @classdesc Represents a ServiceOptions. + * @implements IServiceOptions * @constructor - * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set */ - function ExtensionRangeOptions(properties) { + function ServiceOptions(properties) { this.uninterpretedOption = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) @@ -12963,79 +20701,118 @@ } /** - * ExtensionRangeOptions uninterpretedOption. + * ServiceOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.deprecated = false; + + /** + * ServiceOptions uninterpretedOption. * @member {Array.} uninterpretedOption - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @instance */ - ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; + ServiceOptions.prototype.uninterpretedOption = $util.emptyArray; /** - * Creates a new ExtensionRangeOptions instance using the specified properties. + * ServiceOptions .google.api.defaultHost. 
+ * @member {string} .google.api.defaultHost + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.defaultHost"] = ""; + + /** + * ServiceOptions .google.api.oauthScopes. + * @member {string} .google.api.oauthScopes + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.oauthScopes"] = ""; + + /** + * Creates a new ServiceOptions instance using the specified properties. * @function create - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static - * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions instance + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + * @returns {google.protobuf.ServiceOptions} ServiceOptions instance */ - ExtensionRangeOptions.create = function create(properties) { - return new ExtensionRangeOptions(properties); + ServiceOptions.create = function create(properties) { + return new ServiceOptions(properties); }; /** - * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static - * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ExtensionRangeOptions.encode = function encode(message, writer) { + ServiceOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) + writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); + if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) + writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); return writer; }; /** - * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static - * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ExtensionRangeOptions.encodeDelimited = function encodeDelimited(message, writer) { + ServiceOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * Decodes a ServiceOptions message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @returns {google.protobuf.ServiceOptions} ServiceOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ExtensionRangeOptions.decode = function decode(reader, length) { + ServiceOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; case 999: if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; + case 1049: + message[".google.api.defaultHost"] = reader.string(); + break; + case 1050: + message[".google.api.oauthScopes"] = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -13045,32 +20822,35 @@ }; /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @returns {google.protobuf.ServiceOptions} ServiceOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ExtensionRangeOptions.decodeDelimited = function decodeDelimited(reader) { + ServiceOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an ExtensionRangeOptions message. + * Verifies a ServiceOptions message. 
* @function verify - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ExtensionRangeOptions.verify = function verify(message) { + ServiceOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -13080,99 +20860,118 @@ return "uninterpretedOption." + error; } } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + if (!$util.isString(message[".google.api.defaultHost"])) + return ".google.api.defaultHost: string expected"; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + if (!$util.isString(message[".google.api.oauthScopes"])) + return ".google.api.oauthScopes: string expected"; return null; }; /** - * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static * @param {Object.} object Plain object - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @returns {google.protobuf.ServiceOptions} ServiceOptions */ - ExtensionRangeOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ExtensionRangeOptions) + ServiceOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceOptions) return object; - var message = new $root.google.protobuf.ExtensionRangeOptions(); + var message = new $root.google.protobuf.ServiceOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: array expected"); + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: array expected"); message.uninterpretedOption = []; for (var i = 0; i < object.uninterpretedOption.length; ++i) { if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: object expected"); + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: object expected"); message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } + if (object[".google.api.defaultHost"] != null) + message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); + if (object[".google.api.oauthScopes"] != null) + message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); return message; }; /** - * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * Creates a plain object from a ServiceOptions message. 
Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @static - * @param {google.protobuf.ExtensionRangeOptions} message ExtensionRangeOptions + * @param {google.protobuf.ServiceOptions} message ServiceOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ExtensionRangeOptions.toObject = function toObject(message, options) { + ServiceOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.arrays || options.defaults) object.uninterpretedOption = []; + if (options.defaults) { + object.deprecated = false; + object[".google.api.defaultHost"] = ""; + object[".google.api.oauthScopes"] = ""; + } + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + object[".google.api.defaultHost"] = message[".google.api.defaultHost"]; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; return object; }; /** - * Converts this ExtensionRangeOptions to JSON. + * Converts this ServiceOptions to JSON. 
* @function toJSON - * @memberof google.protobuf.ExtensionRangeOptions + * @memberof google.protobuf.ServiceOptions * @instance * @returns {Object.} JSON object */ - ExtensionRangeOptions.prototype.toJSON = function toJSON() { + ServiceOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ExtensionRangeOptions; + return ServiceOptions; })(); - protobuf.FieldDescriptorProto = (function() { + protobuf.MethodOptions = (function() { /** - * Properties of a FieldDescriptorProto. + * Properties of a MethodOptions. * @memberof google.protobuf - * @interface IFieldDescriptorProto - * @property {string|null} [name] FieldDescriptorProto name - * @property {number|null} [number] FieldDescriptorProto number - * @property {google.protobuf.FieldDescriptorProto.Label|null} [label] FieldDescriptorProto label - * @property {google.protobuf.FieldDescriptorProto.Type|null} [type] FieldDescriptorProto type - * @property {string|null} [typeName] FieldDescriptorProto typeName - * @property {string|null} [extendee] FieldDescriptorProto extendee - * @property {string|null} [defaultValue] FieldDescriptorProto defaultValue - * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex - * @property {string|null} [jsonName] FieldDescriptorProto jsonName - * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options - * @property {boolean|null} [proto3Optional] FieldDescriptorProto proto3Optional + * @interface IMethodOptions + * @property {boolean|null} [deprecated] MethodOptions deprecated + * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel + * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption + * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http + * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature 
*/ /** - * Constructs a new FieldDescriptorProto. + * Constructs a new MethodOptions. * @memberof google.protobuf - * @classdesc Represents a FieldDescriptorProto. - * @implements IFieldDescriptorProto + * @classdesc Represents a MethodOptions. + * @implements IMethodOptions * @constructor - * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set */ - function FieldDescriptorProto(properties) { + function MethodOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.methodSignature"] = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -13180,205 +20979,133 @@ } /** - * FieldDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.name = ""; - - /** - * FieldDescriptorProto number. - * @member {number} number - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.number = 0; - - /** - * FieldDescriptorProto label. - * @member {google.protobuf.FieldDescriptorProto.Label} label - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.label = 1; - - /** - * FieldDescriptorProto type. - * @member {google.protobuf.FieldDescriptorProto.Type} type - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.type = 1; - - /** - * FieldDescriptorProto typeName. - * @member {string} typeName - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.typeName = ""; - - /** - * FieldDescriptorProto extendee. - * @member {string} extendee - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.extendee = ""; - - /** - * FieldDescriptorProto defaultValue. 
- * @member {string} defaultValue - * @memberof google.protobuf.FieldDescriptorProto + * MethodOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.MethodOptions * @instance */ - FieldDescriptorProto.prototype.defaultValue = ""; + MethodOptions.prototype.deprecated = false; /** - * FieldDescriptorProto oneofIndex. - * @member {number} oneofIndex - * @memberof google.protobuf.FieldDescriptorProto + * MethodOptions idempotencyLevel. + * @member {google.protobuf.MethodOptions.IdempotencyLevel} idempotencyLevel + * @memberof google.protobuf.MethodOptions * @instance */ - FieldDescriptorProto.prototype.oneofIndex = 0; + MethodOptions.prototype.idempotencyLevel = 0; /** - * FieldDescriptorProto jsonName. - * @member {string} jsonName - * @memberof google.protobuf.FieldDescriptorProto + * MethodOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MethodOptions * @instance */ - FieldDescriptorProto.prototype.jsonName = ""; + MethodOptions.prototype.uninterpretedOption = $util.emptyArray; /** - * FieldDescriptorProto options. - * @member {google.protobuf.IFieldOptions|null|undefined} options - * @memberof google.protobuf.FieldDescriptorProto + * MethodOptions .google.api.http. + * @member {google.api.IHttpRule|null|undefined} .google.api.http + * @memberof google.protobuf.MethodOptions * @instance */ - FieldDescriptorProto.prototype.options = null; + MethodOptions.prototype[".google.api.http"] = null; /** - * FieldDescriptorProto proto3Optional. - * @member {boolean} proto3Optional - * @memberof google.protobuf.FieldDescriptorProto + * MethodOptions .google.api.methodSignature. + * @member {Array.} .google.api.methodSignature + * @memberof google.protobuf.MethodOptions * @instance */ - FieldDescriptorProto.prototype.proto3Optional = false; + MethodOptions.prototype[".google.api.methodSignature"] = $util.emptyArray; /** - * Creates a new FieldDescriptorProto instance using the specified properties. 
+ * Creates a new MethodOptions instance using the specified properties. * @function create - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static - * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto instance + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + * @returns {google.protobuf.MethodOptions} MethodOptions instance */ - FieldDescriptorProto.create = function create(properties) { - return new FieldDescriptorProto(properties); + MethodOptions.create = function create(properties) { + return new MethodOptions(properties); }; /** - * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. * @function encode - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static - * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FieldDescriptorProto.encode = function encode(message, writer) { + MethodOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); - if (message.number != null && Object.hasOwnProperty.call(message, "number")) - writer.uint32(/* id 3, 
wireType 0 =*/24).int32(message.number); - if (message.label != null && Object.hasOwnProperty.call(message, "label")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); - if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); - if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); - if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) - writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); - if (message.proto3Optional != null && Object.hasOwnProperty.call(message, "proto3Optional")) - writer.uint32(/* id 17, wireType 0 =*/136).bool(message.proto3Optional); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) + writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.methodSignature"] != null && 
message[".google.api.methodSignature"].length) + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); + if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) + $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); return writer; }; /** - * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static - * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FieldDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + MethodOptions.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * Decodes a MethodOptions message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @returns {google.protobuf.MethodOptions} MethodOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldDescriptorProto.decode = function decode(reader, length) { + MethodOptions.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 3: - message.number = reader.int32(); - break; - case 4: - message.label = reader.int32(); - break; - case 5: - message.type = reader.int32(); - break; - case 6: - message.typeName = reader.string(); - break; - case 2: - message.extendee = reader.string(); - break; - case 7: - message.defaultValue = reader.string(); + case 33: + message.deprecated = reader.bool(); break; - case 9: - message.oneofIndex = reader.int32(); + case 34: + message.idempotencyLevel = reader.int32(); break; - case 10: - message.jsonName = reader.string(); + case 999: + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; - case 8: - message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); + case 
72295728: + message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); break; - case 17: - message.proto3Optional = reader.bool(); + case 1051: + if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) + message[".google.api.methodSignature"] = []; + message[".google.api.methodSignature"].push(reader.string()); break; default: reader.skipType(tag & 7); @@ -13389,588 +21116,800 @@ }; /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @returns {google.protobuf.MethodOptions} MethodOptions * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + MethodOptions.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a FieldDescriptorProto message. + * Verifies a MethodOptions message. 
* @function verify - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - FieldDescriptorProto.verify = function verify(message) { + MethodOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.number != null && message.hasOwnProperty("number")) - if (!$util.isInteger(message.number)) - return "number: integer expected"; - if (message.label != null && message.hasOwnProperty("label")) - switch (message.label) { + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + switch (message.idempotencyLevel) { default: - return "label: enum value expected"; + return "idempotencyLevel: enum value expected"; + case 0: case 1: case 2: - case 3: break; } - if (message.type != null && message.hasOwnProperty("type")) - switch (message.type) { - default: - return "type: enum value expected"; - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - case 10: - case 11: - case 12: - case 13: - case 14: - case 15: - case 16: - case 17: - case 18: - break; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." 
+ error; } - if (message.typeName != null && message.hasOwnProperty("typeName")) - if (!$util.isString(message.typeName)) - return "typeName: string expected"; - if (message.extendee != null && message.hasOwnProperty("extendee")) - if (!$util.isString(message.extendee)) - return "extendee: string expected"; - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) - if (!$util.isString(message.defaultValue)) - return "defaultValue: string expected"; - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) - if (!$util.isInteger(message.oneofIndex)) - return "oneofIndex: integer expected"; - if (message.jsonName != null && message.hasOwnProperty("jsonName")) - if (!$util.isString(message.jsonName)) - return "jsonName: string expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.FieldOptions.verify(message.options); + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) { + var error = $root.google.api.HttpRule.verify(message[".google.api.http"]); if (error) - return "options." + error; + return ".google.api.http." + error; + } + if (message[".google.api.methodSignature"] != null && message.hasOwnProperty(".google.api.methodSignature")) { + if (!Array.isArray(message[".google.api.methodSignature"])) + return ".google.api.methodSignature: array expected"; + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + if (!$util.isString(message[".google.api.methodSignature"][i])) + return ".google.api.methodSignature: string[] expected"; } - if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) - if (typeof message.proto3Optional !== "boolean") - return "proto3Optional: boolean expected"; return null; }; /** - * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a MethodOptions message from a plain object. 
Also converts values to their respective internal types. * @function fromObject - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static * @param {Object.} object Plain object - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @returns {google.protobuf.MethodOptions} MethodOptions */ - FieldDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FieldDescriptorProto) + MethodOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodOptions) return object; - var message = new $root.google.protobuf.FieldDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.number != null) - message.number = object.number | 0; - switch (object.label) { - case "LABEL_OPTIONAL": - case 1: - message.label = 1; - break; - case "LABEL_REQUIRED": - case 2: - message.label = 2; - break; - case "LABEL_REPEATED": - case 3: - message.label = 3; - break; - } - switch (object.type) { - case "TYPE_DOUBLE": - case 1: - message.type = 1; - break; - case "TYPE_FLOAT": - case 2: - message.type = 2; - break; - case "TYPE_INT64": - case 3: - message.type = 3; - break; - case "TYPE_UINT64": - case 4: - message.type = 4; - break; - case "TYPE_INT32": - case 5: - message.type = 5; - break; - case "TYPE_FIXED64": - case 6: - message.type = 6; - break; - case "TYPE_FIXED32": - case 7: - message.type = 7; - break; - case "TYPE_BOOL": - case 8: - message.type = 8; - break; - case "TYPE_STRING": - case 9: - message.type = 9; - break; - case "TYPE_GROUP": - case 10: - message.type = 10; - break; - case "TYPE_MESSAGE": - case 11: - message.type = 11; - break; - case "TYPE_BYTES": - case 12: - message.type = 12; - break; - case "TYPE_UINT32": - case 13: - message.type = 13; - break; - case "TYPE_ENUM": - case 14: - message.type = 14; - break; - case "TYPE_SFIXED32": - case 15: - message.type = 15; - break; 
- case "TYPE_SFIXED64": - case 16: - message.type = 16; + var message = new $root.google.protobuf.MethodOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + switch (object.idempotencyLevel) { + case "IDEMPOTENCY_UNKNOWN": + case 0: + message.idempotencyLevel = 0; break; - case "TYPE_SINT32": - case 17: - message.type = 17; + case "NO_SIDE_EFFECTS": + case 1: + message.idempotencyLevel = 1; break; - case "TYPE_SINT64": - case 18: - message.type = 18; + case "IDEMPOTENT": + case 2: + message.idempotencyLevel = 2; break; } - if (object.typeName != null) - message.typeName = String(object.typeName); - if (object.extendee != null) - message.extendee = String(object.extendee); - if (object.defaultValue != null) - message.defaultValue = String(object.defaultValue); - if (object.oneofIndex != null) - message.oneofIndex = object.oneofIndex | 0; - if (object.jsonName != null) - message.jsonName = String(object.jsonName); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.http"] != null) { + if (typeof object[".google.api.http"] !== "object") + throw TypeError(".google.protobuf.MethodOptions..google.api.http: object expected"); + message[".google.api.http"] = 
$root.google.api.HttpRule.fromObject(object[".google.api.http"]); + } + if (object[".google.api.methodSignature"]) { + if (!Array.isArray(object[".google.api.methodSignature"])) + throw TypeError(".google.protobuf.MethodOptions..google.api.methodSignature: array expected"); + message[".google.api.methodSignature"] = []; + for (var i = 0; i < object[".google.api.methodSignature"].length; ++i) + message[".google.api.methodSignature"][i] = String(object[".google.api.methodSignature"][i]); } - if (object.proto3Optional != null) - message.proto3Optional = Boolean(object.proto3Optional); return message; }; /** - * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @static - * @param {google.protobuf.FieldDescriptorProto} message FieldDescriptorProto + * @param {google.protobuf.MethodOptions} message MethodOptions * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FieldDescriptorProto.toObject = function toObject(message, options) { + MethodOptions.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.methodSignature"] = []; + } if (options.defaults) { - object.name = ""; - object.extendee = ""; - object.number = 0; - object.label = options.enums === String ? "LABEL_OPTIONAL" : 1; - object.type = options.enums === String ? "TYPE_DOUBLE" : 1; - object.typeName = ""; - object.defaultValue = ""; - object.options = null; - object.oneofIndex = 0; - object.jsonName = ""; - object.proto3Optional = false; + object.deprecated = false; + object.idempotencyLevel = options.enums === String ? 
"IDEMPOTENCY_UNKNOWN" : 0; + object[".google.api.http"] = null; } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.extendee != null && message.hasOwnProperty("extendee")) - object.extendee = message.extendee; - if (message.number != null && message.hasOwnProperty("number")) - object.number = message.number; - if (message.label != null && message.hasOwnProperty("label")) - object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; - if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; - if (message.typeName != null && message.hasOwnProperty("typeName")) - object.typeName = message.typeName; - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) - object.defaultValue = message.defaultValue; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.FieldOptions.toObject(message.options, options); - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) - object.oneofIndex = message.oneofIndex; - if (message.jsonName != null && message.hasOwnProperty("jsonName")) - object.jsonName = message.jsonName; - if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) - object.proto3Optional = message.proto3Optional; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + object.idempotencyLevel = options.enums === String ? 
$root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length) { + object[".google.api.methodSignature"] = []; + for (var j = 0; j < message[".google.api.methodSignature"].length; ++j) + object[".google.api.methodSignature"][j] = message[".google.api.methodSignature"][j]; + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + object[".google.api.http"] = $root.google.api.HttpRule.toObject(message[".google.api.http"], options); return object; }; /** - * Converts this FieldDescriptorProto to JSON. + * Converts this MethodOptions to JSON. * @function toJSON - * @memberof google.protobuf.FieldDescriptorProto + * @memberof google.protobuf.MethodOptions * @instance * @returns {Object.} JSON object */ - FieldDescriptorProto.prototype.toJSON = function toJSON() { + MethodOptions.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; /** - * Type enum. 
- * @name google.protobuf.FieldDescriptorProto.Type - * @enum {number} - * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value - * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value - * @property {number} TYPE_INT64=3 TYPE_INT64 value - * @property {number} TYPE_UINT64=4 TYPE_UINT64 value - * @property {number} TYPE_INT32=5 TYPE_INT32 value - * @property {number} TYPE_FIXED64=6 TYPE_FIXED64 value - * @property {number} TYPE_FIXED32=7 TYPE_FIXED32 value - * @property {number} TYPE_BOOL=8 TYPE_BOOL value - * @property {number} TYPE_STRING=9 TYPE_STRING value - * @property {number} TYPE_GROUP=10 TYPE_GROUP value - * @property {number} TYPE_MESSAGE=11 TYPE_MESSAGE value - * @property {number} TYPE_BYTES=12 TYPE_BYTES value - * @property {number} TYPE_UINT32=13 TYPE_UINT32 value - * @property {number} TYPE_ENUM=14 TYPE_ENUM value - * @property {number} TYPE_SFIXED32=15 TYPE_SFIXED32 value - * @property {number} TYPE_SFIXED64=16 TYPE_SFIXED64 value - * @property {number} TYPE_SINT32=17 TYPE_SINT32 value - * @property {number} TYPE_SINT64=18 TYPE_SINT64 value - */ - FieldDescriptorProto.Type = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "TYPE_DOUBLE"] = 1; - values[valuesById[2] = "TYPE_FLOAT"] = 2; - values[valuesById[3] = "TYPE_INT64"] = 3; - values[valuesById[4] = "TYPE_UINT64"] = 4; - values[valuesById[5] = "TYPE_INT32"] = 5; - values[valuesById[6] = "TYPE_FIXED64"] = 6; - values[valuesById[7] = "TYPE_FIXED32"] = 7; - values[valuesById[8] = "TYPE_BOOL"] = 8; - values[valuesById[9] = "TYPE_STRING"] = 9; - values[valuesById[10] = "TYPE_GROUP"] = 10; - values[valuesById[11] = "TYPE_MESSAGE"] = 11; - values[valuesById[12] = "TYPE_BYTES"] = 12; - values[valuesById[13] = "TYPE_UINT32"] = 13; - values[valuesById[14] = "TYPE_ENUM"] = 14; - values[valuesById[15] = "TYPE_SFIXED32"] = 15; - values[valuesById[16] = "TYPE_SFIXED64"] = 16; - values[valuesById[17] = "TYPE_SINT32"] = 17; - values[valuesById[18] = "TYPE_SINT64"] = 18; 
- return values; - })(); - - /** - * Label enum. - * @name google.protobuf.FieldDescriptorProto.Label + * IdempotencyLevel enum. + * @name google.protobuf.MethodOptions.IdempotencyLevel * @enum {number} - * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value - * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value - * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value + * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value + * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value + * @property {number} IDEMPOTENT=2 IDEMPOTENT value */ - FieldDescriptorProto.Label = (function() { + MethodOptions.IdempotencyLevel = (function() { var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "LABEL_OPTIONAL"] = 1; - values[valuesById[2] = "LABEL_REQUIRED"] = 2; - values[valuesById[3] = "LABEL_REPEATED"] = 3; + values[valuesById[0] = "IDEMPOTENCY_UNKNOWN"] = 0; + values[valuesById[1] = "NO_SIDE_EFFECTS"] = 1; + values[valuesById[2] = "IDEMPOTENT"] = 2; return values; })(); - return FieldDescriptorProto; + return MethodOptions; })(); - protobuf.OneofDescriptorProto = (function() { + protobuf.UninterpretedOption = (function() { /** - * Properties of an OneofDescriptorProto. + * Properties of an UninterpretedOption. 
* @memberof google.protobuf - * @interface IOneofDescriptorProto - * @property {string|null} [name] OneofDescriptorProto name - * @property {google.protobuf.IOneofOptions|null} [options] OneofDescriptorProto options + * @interface IUninterpretedOption + * @property {Array.|null} [name] UninterpretedOption name + * @property {string|null} [identifierValue] UninterpretedOption identifierValue + * @property {number|Long|null} [positiveIntValue] UninterpretedOption positiveIntValue + * @property {number|Long|null} [negativeIntValue] UninterpretedOption negativeIntValue + * @property {number|null} [doubleValue] UninterpretedOption doubleValue + * @property {Uint8Array|null} [stringValue] UninterpretedOption stringValue + * @property {string|null} [aggregateValue] UninterpretedOption aggregateValue */ /** - * Constructs a new OneofDescriptorProto. + * Constructs a new UninterpretedOption. * @memberof google.protobuf - * @classdesc Represents an OneofDescriptorProto. - * @implements IOneofDescriptorProto + * @classdesc Represents an UninterpretedOption. + * @implements IUninterpretedOption * @constructor - * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + */ + function UninterpretedOption(properties) { + this.name = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UninterpretedOption name. + * @member {Array.} name + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.name = $util.emptyArray; + + /** + * UninterpretedOption identifierValue. + * @member {string} identifierValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.identifierValue = ""; + + /** + * UninterpretedOption positiveIntValue. 
+ * @member {number|Long} positiveIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.positiveIntValue = $util.Long ? $util.Long.fromBits(0,0,true) : 0; + + /** + * UninterpretedOption negativeIntValue. + * @member {number|Long} negativeIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.negativeIntValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * UninterpretedOption doubleValue. + * @member {number} doubleValue + * @memberof google.protobuf.UninterpretedOption + * @instance */ - function OneofDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + UninterpretedOption.prototype.doubleValue = 0; /** - * OneofDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.OneofDescriptorProto + * UninterpretedOption stringValue. + * @member {Uint8Array} stringValue + * @memberof google.protobuf.UninterpretedOption * @instance */ - OneofDescriptorProto.prototype.name = ""; + UninterpretedOption.prototype.stringValue = $util.newBuffer([]); /** - * OneofDescriptorProto options. - * @member {google.protobuf.IOneofOptions|null|undefined} options - * @memberof google.protobuf.OneofDescriptorProto + * UninterpretedOption aggregateValue. + * @member {string} aggregateValue + * @memberof google.protobuf.UninterpretedOption * @instance */ - OneofDescriptorProto.prototype.options = null; + UninterpretedOption.prototype.aggregateValue = ""; /** - * Creates a new OneofDescriptorProto instance using the specified properties. + * Creates a new UninterpretedOption instance using the specified properties. 
* @function create - * @memberof google.protobuf.OneofDescriptorProto + * @memberof google.protobuf.UninterpretedOption * @static - * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto instance + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption instance */ - OneofDescriptorProto.create = function create(properties) { - return new OneofDescriptorProto(properties); + UninterpretedOption.create = function create(properties) { + return new UninterpretedOption(properties); }; /** - * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. * @function encode - * @memberof google.protobuf.OneofDescriptorProto + * @memberof google.protobuf.UninterpretedOption * @static - * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - OneofDescriptorProto.encode = function encode(message, writer) { + UninterpretedOption.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.name != null && message.name.length) + for (var 
i = 0; i < message.name.length; ++i) + $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); + if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) + writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); + if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) + writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); + if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) + writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 7, wireType 2 =*/58).bytes(message.stringValue); + if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); return writer; }; /** - * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.OneofDescriptorProto + * @memberof google.protobuf.UninterpretedOption * @static - * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - OneofDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + UninterpretedOption.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * Decodes an UninterpretedOption message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.OneofDescriptorProto + * @memberof google.protobuf.UninterpretedOption * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - OneofDescriptorProto.decode = function decode(reader, length) { + UninterpretedOption.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; case 2: - message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); + if (!(message.name && message.name.length)) + message.name = []; + message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = reader.uint64(); + break; + case 5: + message.negativeIntValue = reader.int64(); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); break; default: reader.skipType(tag & 7); break; } } - return message; + return message; + }; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UninterpretedOption.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an UninterpretedOption message. 
+ * @function verify + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UninterpretedOption.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) { + if (!Array.isArray(message.name)) + return "name: array expected"; + for (var i = 0; i < message.name.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.NamePart.verify(message.name[i]); + if (error) + return "name." + error; + } + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + if (!$util.isString(message.identifierValue)) + return "identifierValue: string expected"; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (!$util.isInteger(message.positiveIntValue) && !(message.positiveIntValue && $util.isInteger(message.positiveIntValue.low) && $util.isInteger(message.positiveIntValue.high))) + return "positiveIntValue: integer|Long expected"; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (!$util.isInteger(message.negativeIntValue) && !(message.negativeIntValue && $util.isInteger(message.negativeIntValue.low) && $util.isInteger(message.negativeIntValue.high))) + return "negativeIntValue: integer|Long expected"; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + if (typeof message.doubleValue !== "number") + return "doubleValue: number expected"; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (!(message.stringValue && typeof message.stringValue.length === "number" || $util.isString(message.stringValue))) + return "stringValue: buffer expected"; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + if 
(!$util.isString(message.aggregateValue)) + return "aggregateValue: string expected"; + return null; + }; + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + */ + UninterpretedOption.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption) + return object; + var message = new $root.google.protobuf.UninterpretedOption(); + if (object.name) { + if (!Array.isArray(object.name)) + throw TypeError(".google.protobuf.UninterpretedOption.name: array expected"); + message.name = []; + for (var i = 0; i < object.name.length; ++i) { + if (typeof object.name[i] !== "object") + throw TypeError(".google.protobuf.UninterpretedOption.name: object expected"); + message.name[i] = $root.google.protobuf.UninterpretedOption.NamePart.fromObject(object.name[i]); + } + } + if (object.identifierValue != null) + message.identifierValue = String(object.identifierValue); + if (object.positiveIntValue != null) + if ($util.Long) + (message.positiveIntValue = $util.Long.fromValue(object.positiveIntValue)).unsigned = true; + else if (typeof object.positiveIntValue === "string") + message.positiveIntValue = parseInt(object.positiveIntValue, 10); + else if (typeof object.positiveIntValue === "number") + message.positiveIntValue = object.positiveIntValue; + else if (typeof object.positiveIntValue === "object") + message.positiveIntValue = new $util.LongBits(object.positiveIntValue.low >>> 0, object.positiveIntValue.high >>> 0).toNumber(true); + if (object.negativeIntValue != null) + if ($util.Long) + (message.negativeIntValue = $util.Long.fromValue(object.negativeIntValue)).unsigned = false; + else if (typeof object.negativeIntValue === "string") + message.negativeIntValue = 
parseInt(object.negativeIntValue, 10); + else if (typeof object.negativeIntValue === "number") + message.negativeIntValue = object.negativeIntValue; + else if (typeof object.negativeIntValue === "object") + message.negativeIntValue = new $util.LongBits(object.negativeIntValue.low >>> 0, object.negativeIntValue.high >>> 0).toNumber(); + if (object.doubleValue != null) + message.doubleValue = Number(object.doubleValue); + if (object.stringValue != null) + if (typeof object.stringValue === "string") + $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); + else if (object.stringValue.length) + message.stringValue = object.stringValue; + if (object.aggregateValue != null) + message.aggregateValue = String(object.aggregateValue); + return message; + }; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.UninterpretedOption} message UninterpretedOption + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UninterpretedOption.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.name = []; + if (options.defaults) { + object.identifierValue = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, true); + object.positiveIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.positiveIntValue = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.negativeIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.negativeIntValue = options.longs === String ? 
"0" : 0; + object.doubleValue = 0; + if (options.bytes === String) + object.stringValue = ""; + else { + object.stringValue = []; + if (options.bytes !== Array) + object.stringValue = $util.newBuffer(object.stringValue); + } + object.aggregateValue = ""; + } + if (message.name && message.name.length) { + object.name = []; + for (var j = 0; j < message.name.length; ++j) + object.name[j] = $root.google.protobuf.UninterpretedOption.NamePart.toObject(message.name[j], options); + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + object.identifierValue = message.identifierValue; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (typeof message.positiveIntValue === "number") + object.positiveIntValue = options.longs === String ? String(message.positiveIntValue) : message.positiveIntValue; + else + object.positiveIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.positiveIntValue) : options.longs === Number ? new $util.LongBits(message.positiveIntValue.low >>> 0, message.positiveIntValue.high >>> 0).toNumber(true) : message.positiveIntValue; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (typeof message.negativeIntValue === "number") + object.negativeIntValue = options.longs === String ? String(message.negativeIntValue) : message.negativeIntValue; + else + object.negativeIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.negativeIntValue) : options.longs === Number ? new $util.LongBits(message.negativeIntValue.low >>> 0, message.negativeIntValue.high >>> 0).toNumber() : message.negativeIntValue; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + object.doubleValue = options.json && !isFinite(message.doubleValue) ? 
String(message.doubleValue) : message.doubleValue; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + object.stringValue = options.bytes === String ? $util.base64.encode(message.stringValue, 0, message.stringValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.stringValue) : message.stringValue; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + object.aggregateValue = message.aggregateValue; + return object; }; /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * Converts this UninterpretedOption to JSON. + * @function toJSON + * @memberof google.protobuf.UninterpretedOption + * @instance + * @returns {Object.} JSON object */ - OneofDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); + UninterpretedOption.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - /** - * Verifies an OneofDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - OneofDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.OneofOptions.verify(message.options); - if (error) - return "options." + error; + UninterpretedOption.NamePart = (function() { + + /** + * Properties of a NamePart. + * @memberof google.protobuf.UninterpretedOption + * @interface INamePart + * @property {string} namePart NamePart namePart + * @property {boolean} isExtension NamePart isExtension + */ + + /** + * Constructs a new NamePart. + * @memberof google.protobuf.UninterpretedOption + * @classdesc Represents a NamePart. + * @implements INamePart + * @constructor + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + */ + function NamePart(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; } - return null; - }; - /** - * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto - */ - OneofDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.OneofDescriptorProto) + /** + * NamePart namePart. 
+ * @member {string} namePart + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.namePart = ""; + + /** + * NamePart isExtension. + * @member {boolean} isExtension + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.isExtension = false; + + /** + * Creates a new NamePart instance using the specified properties. + * @function create + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart instance + */ + NamePart.create = function create(properties) { + return new NamePart(properties); + }; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + writer.uint32(/* id 1, wireType 2 =*/10).string(message.namePart); + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.isExtension); + return writer; + }; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + if (!message.hasOwnProperty("namePart")) + throw $util.ProtocolError("missing required 'namePart'", { instance: message }); + if (!message.hasOwnProperty("isExtension")) + throw $util.ProtocolError("missing required 'isExtension'", { instance: message }); + return message; + }; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a NamePart message. + * @function verify + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + NamePart.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (!$util.isString(message.namePart)) + return "namePart: string expected"; + if (typeof message.isExtension !== "boolean") + return "isExtension: boolean expected"; + return null; + }; + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + */ + NamePart.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) + return object; + var message = new $root.google.protobuf.UninterpretedOption.NamePart(); + if (object.namePart != null) + message.namePart = String(object.namePart); + if (object.isExtension != null) + message.isExtension = Boolean(object.isExtension); + return message; + }; + + /** + * Creates a plain object from a NamePart message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + NamePart.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.namePart = ""; + object.isExtension = false; + } + if (message.namePart != null && message.hasOwnProperty("namePart")) + object.namePart = message.namePart; + if (message.isExtension != null && message.hasOwnProperty("isExtension")) + object.isExtension = message.isExtension; return object; - var message = new $root.google.protobuf.OneofDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.OneofOptions.fromObject(object.options); - } - return message; - }; + }; - /** - * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {google.protobuf.OneofDescriptorProto} message OneofDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - OneofDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.OneofOptions.toObject(message.options, options); - return object; - }; + /** + * Converts this NamePart to JSON. + * @function toJSON + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + * @returns {Object.} JSON object + */ + NamePart.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this OneofDescriptorProto to JSON. - * @function toJSON - * @memberof google.protobuf.OneofDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - OneofDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + return NamePart; + })(); - return OneofDescriptorProto; + return UninterpretedOption; })(); - protobuf.EnumDescriptorProto = (function() { + protobuf.SourceCodeInfo = (function() { /** - * Properties of an EnumDescriptorProto. + * Properties of a SourceCodeInfo. 
* @memberof google.protobuf - * @interface IEnumDescriptorProto - * @property {string|null} [name] EnumDescriptorProto name - * @property {Array.|null} [value] EnumDescriptorProto value - * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options - * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange - * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + * @interface ISourceCodeInfo + * @property {Array.|null} [location] SourceCodeInfo location */ /** - * Constructs a new EnumDescriptorProto. + * Constructs a new SourceCodeInfo. * @memberof google.protobuf - * @classdesc Represents an EnumDescriptorProto. - * @implements IEnumDescriptorProto + * @classdesc Represents a SourceCodeInfo. + * @implements ISourceCodeInfo * @constructor - * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set */ - function EnumDescriptorProto(properties) { - this.value = []; - this.reservedRange = []; - this.reservedName = []; + function SourceCodeInfo(properties) { + this.location = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -13978,136 +21917,78 @@ } /** - * EnumDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.name = ""; - - /** - * EnumDescriptorProto value. - * @member {Array.} value - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.value = $util.emptyArray; - - /** - * EnumDescriptorProto options. - * @member {google.protobuf.IEnumOptions|null|undefined} options - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.options = null; - - /** - * EnumDescriptorProto reservedRange. 
- * @member {Array.} reservedRange - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.reservedRange = $util.emptyArray; - - /** - * EnumDescriptorProto reservedName. - * @member {Array.} reservedName - * @memberof google.protobuf.EnumDescriptorProto + * SourceCodeInfo location. + * @member {Array.} location + * @memberof google.protobuf.SourceCodeInfo * @instance */ - EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + SourceCodeInfo.prototype.location = $util.emptyArray; /** - * Creates a new EnumDescriptorProto instance using the specified properties. + * Creates a new SourceCodeInfo instance using the specified properties. * @function create - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static - * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto instance + * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo instance */ - EnumDescriptorProto.create = function create(properties) { - return new EnumDescriptorProto(properties); + SourceCodeInfo.create = function create(properties) { + return new SourceCodeInfo(properties); }; /** - * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static - * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumDescriptorProto.encode = function encode(message, writer) { + SourceCodeInfo.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.value != null && message.value.length) - for (var i = 0; i < message.value.length; ++i) - $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.reservedRange != null && message.reservedRange.length) - for (var i = 0; i < message.reservedRange.length; ++i) - $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.reservedName != null && message.reservedName.length) - for (var i = 0; i < message.reservedName.length; ++i) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + if (message.location != null && message.location.length) + for (var i = 0; i < message.location.length; ++i) + $root.google.protobuf.SourceCodeInfo.Location.encode(message.location[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; /** - * Encodes the specified EnumDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static - * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + SourceCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * Decodes a SourceCodeInfo message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumDescriptorProto.decode = function decode(reader, length) { + SourceCodeInfo.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.name = reader.string(); - break; - case 2: - if (!(message.value && message.value.length)) - message.value = []; - message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); - break; - case 4: - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - message.reservedName.push(reader.string()); + if (!(message.location && message.location.length)) + message.location = []; + message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -14118,191 +21999,128 @@ }; /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + SourceCodeInfo.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; - - /** - * Verifies an EnumDescriptorProto message. - * @function verify - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.value != null && message.hasOwnProperty("value")) { - if (!Array.isArray(message.value)) - return "value: array expected"; - for (var i = 0; i < message.value.length; ++i) { - var error = $root.google.protobuf.EnumValueDescriptorProto.verify(message.value[i]); - if (error) - return "value." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.EnumOptions.verify(message.options); - if (error) - return "options." 
+ error; - } - if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { - if (!Array.isArray(message.reservedRange)) - return "reservedRange: array expected"; - for (var i = 0; i < message.reservedRange.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.verify(message.reservedRange[i]); + + /** + * Verifies a SourceCodeInfo message. + * @function verify + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SourceCodeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.location != null && message.hasOwnProperty("location")) { + if (!Array.isArray(message.location)) + return "location: array expected"; + for (var i = 0; i < message.location.length; ++i) { + var error = $root.google.protobuf.SourceCodeInfo.Location.verify(message.location[i]); if (error) - return "reservedRange." + error; + return "location." + error; } } - if (message.reservedName != null && message.hasOwnProperty("reservedName")) { - if (!Array.isArray(message.reservedName)) - return "reservedName: array expected"; - for (var i = 0; i < message.reservedName.length; ++i) - if (!$util.isString(message.reservedName[i])) - return "reservedName: string[] expected"; - } return null; }; /** - * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static * @param {Object.} object Plain object - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo */ - EnumDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumDescriptorProto) + SourceCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo) return object; - var message = new $root.google.protobuf.EnumDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.value) { - if (!Array.isArray(object.value)) - throw TypeError(".google.protobuf.EnumDescriptorProto.value: array expected"); - message.value = []; - for (var i = 0; i < object.value.length; ++i) { - if (typeof object.value[i] !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.value: object expected"); - message.value[i] = $root.google.protobuf.EnumValueDescriptorProto.fromObject(object.value[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.EnumOptions.fromObject(object.options); - } - if (object.reservedRange) { - if (!Array.isArray(object.reservedRange)) - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: array expected"); - message.reservedRange = []; - for (var i = 0; i < object.reservedRange.length; ++i) { - if (typeof object.reservedRange[i] !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: object expected"); - message.reservedRange[i] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.fromObject(object.reservedRange[i]); + var message = new $root.google.protobuf.SourceCodeInfo(); + if (object.location) { + if 
(!Array.isArray(object.location)) + throw TypeError(".google.protobuf.SourceCodeInfo.location: array expected"); + message.location = []; + for (var i = 0; i < object.location.length; ++i) { + if (typeof object.location[i] !== "object") + throw TypeError(".google.protobuf.SourceCodeInfo.location: object expected"); + message.location[i] = $root.google.protobuf.SourceCodeInfo.Location.fromObject(object.location[i]); } } - if (object.reservedName) { - if (!Array.isArray(object.reservedName)) - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedName: array expected"); - message.reservedName = []; - for (var i = 0; i < object.reservedName.length; ++i) - message.reservedName[i] = String(object.reservedName[i]); - } return message; }; /** - * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @static - * @param {google.protobuf.EnumDescriptorProto} message EnumDescriptorProto + * @param {google.protobuf.SourceCodeInfo} message SourceCodeInfo * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - EnumDescriptorProto.toObject = function toObject(message, options) { + SourceCodeInfo.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.value = []; - object.reservedRange = []; - object.reservedName = []; - } - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.value && message.value.length) { - object.value = []; - for (var j = 0; j < message.value.length; ++j) - object.value[j] = 
$root.google.protobuf.EnumValueDescriptorProto.toObject(message.value[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.EnumOptions.toObject(message.options, options); - if (message.reservedRange && message.reservedRange.length) { - object.reservedRange = []; - for (var j = 0; j < message.reservedRange.length; ++j) - object.reservedRange[j] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.toObject(message.reservedRange[j], options); - } - if (message.reservedName && message.reservedName.length) { - object.reservedName = []; - for (var j = 0; j < message.reservedName.length; ++j) - object.reservedName[j] = message.reservedName[j]; + if (options.arrays || options.defaults) + object.location = []; + if (message.location && message.location.length) { + object.location = []; + for (var j = 0; j < message.location.length; ++j) + object.location[j] = $root.google.protobuf.SourceCodeInfo.Location.toObject(message.location[j], options); } return object; }; /** - * Converts this EnumDescriptorProto to JSON. + * Converts this SourceCodeInfo to JSON. * @function toJSON - * @memberof google.protobuf.EnumDescriptorProto + * @memberof google.protobuf.SourceCodeInfo * @instance * @returns {Object.} JSON object */ - EnumDescriptorProto.prototype.toJSON = function toJSON() { + SourceCodeInfo.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - EnumDescriptorProto.EnumReservedRange = (function() { + SourceCodeInfo.Location = (function() { /** - * Properties of an EnumReservedRange. - * @memberof google.protobuf.EnumDescriptorProto - * @interface IEnumReservedRange - * @property {number|null} [start] EnumReservedRange start - * @property {number|null} [end] EnumReservedRange end + * Properties of a Location. 
+ * @memberof google.protobuf.SourceCodeInfo + * @interface ILocation + * @property {Array.|null} [path] Location path + * @property {Array.|null} [span] Location span + * @property {string|null} [leadingComments] Location leadingComments + * @property {string|null} [trailingComments] Location trailingComments + * @property {Array.|null} [leadingDetachedComments] Location leadingDetachedComments */ /** - * Constructs a new EnumReservedRange. - * @memberof google.protobuf.EnumDescriptorProto - * @classdesc Represents an EnumReservedRange. - * @implements IEnumReservedRange + * Constructs a new Location. + * @memberof google.protobuf.SourceCodeInfo + * @classdesc Represents a Location. + * @implements ILocation * @constructor - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set */ - function EnumReservedRange(properties) { + function Location(properties) { + this.path = []; + this.span = []; + this.leadingDetachedComments = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -14310,88 +22128,152 @@ } /** - * EnumReservedRange start. - * @member {number} start - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * Location path. + * @member {Array.} path + * @memberof google.protobuf.SourceCodeInfo.Location * @instance */ - EnumReservedRange.prototype.start = 0; + Location.prototype.path = $util.emptyArray; /** - * EnumReservedRange end. - * @member {number} end - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * Location span. + * @member {Array.} span + * @memberof google.protobuf.SourceCodeInfo.Location * @instance */ - EnumReservedRange.prototype.end = 0; + Location.prototype.span = $util.emptyArray; /** - * Creates a new EnumReservedRange instance using the specified properties. + * Location leadingComments. 
+ * @member {string} leadingComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.leadingComments = ""; + + /** + * Location trailingComments. + * @member {string} trailingComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.trailingComments = ""; + + /** + * Location leadingDetachedComments. + * @member {Array.} leadingDetachedComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.leadingDetachedComments = $util.emptyArray; + + /** + * Creates a new Location instance using the specified properties. * @function create - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange instance + * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set + * @returns {google.protobuf.SourceCodeInfo.Location} Location instance */ - EnumReservedRange.create = function create(properties) { - return new EnumReservedRange(properties); + Location.create = function create(properties) { + return new Location(properties); }; /** - * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumReservedRange.encode = function encode(message, writer) { + Location.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + if (message.path != null && message.path.length) { + writer.uint32(/* id 1, wireType 2 =*/10).fork(); + for (var i = 0; i < message.path.length; ++i) + writer.int32(message.path[i]); + writer.ldelim(); + } + if (message.span != null && message.span.length) { + writer.uint32(/* id 2, wireType 2 =*/18).fork(); + for (var i = 0; i < message.span.length; ++i) + writer.int32(message.span[i]); + writer.ldelim(); + } + if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); + if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); + if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.leadingDetachedComments[i]); return writer; }; /** - * Encodes the specified EnumReservedRange message, 
length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + Location.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an EnumReservedRange message from the specified reader or buffer. + * Decodes a Location message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @returns {google.protobuf.SourceCodeInfo.Location} Location * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumReservedRange.decode = function decode(reader, length) { + Location.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.start = reader.int32(); + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); break; case 2: - message.end = reader.int32(); + if (!(message.span && message.span.length)) + message.span = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.span.push(reader.int32()); + } else + message.span.push(reader.int32()); + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) + message.leadingDetachedComments = []; + message.leadingDetachedComments.push(reader.string()); break; default: reader.skipType(tag & 7); @@ -14402,359 +22284,182 @@ }; /** - * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * Decodes a Location message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @returns {google.protobuf.SourceCodeInfo.Location} Location * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumReservedRange.decodeDelimited = function decodeDelimited(reader) { + Location.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an EnumReservedRange message. + * Verifies a Location message. * @function verify - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @memberof google.protobuf.SourceCodeInfo.Location * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - EnumReservedRange.verify = function verify(message) { + Location.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - return null; - }; - - /** - * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange - */ - EnumReservedRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumDescriptorProto.EnumReservedRange) - return object; - var message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - return message; - }; - - /** - * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {google.protobuf.EnumDescriptorProto.EnumReservedRange} message EnumReservedRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumReservedRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - return object; - }; - - /** - * Converts this EnumReservedRange to JSON. 
- * @function toJSON - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @instance - * @returns {Object.} JSON object - */ - EnumReservedRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - return EnumReservedRange; - })(); - - return EnumDescriptorProto; - })(); - - protobuf.EnumValueDescriptorProto = (function() { - - /** - * Properties of an EnumValueDescriptorProto. - * @memberof google.protobuf - * @interface IEnumValueDescriptorProto - * @property {string|null} [name] EnumValueDescriptorProto name - * @property {number|null} [number] EnumValueDescriptorProto number - * @property {google.protobuf.IEnumValueOptions|null} [options] EnumValueDescriptorProto options - */ - - /** - * Constructs a new EnumValueDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents an EnumValueDescriptorProto. - * @implements IEnumValueDescriptorProto - * @constructor - * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set - */ - function EnumValueDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumValueDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.name = ""; - - /** - * EnumValueDescriptorProto number. - * @member {number} number - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.number = 0; - - /** - * EnumValueDescriptorProto options. 
- * @member {google.protobuf.IEnumValueOptions|null|undefined} options - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.options = null; - - /** - * Creates a new EnumValueDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto instance - */ - EnumValueDescriptorProto.create = function create(properties) { - return new EnumValueDescriptorProto(properties); - }; - - /** - * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.number != null && Object.hasOwnProperty.call(message, "number")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.number = reader.int32(); - break; - case 3: - message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; + if (message.span != null && message.hasOwnProperty("span")) { + if (!Array.isArray(message.span)) + return "span: array expected"; + for (var i = 0; i < message.span.length; ++i) + if (!$util.isInteger(message.span[i])) + return "span: integer[] expected"; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + if (!$util.isString(message.leadingComments)) + return "leadingComments: string expected"; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + if (!$util.isString(message.trailingComments)) + return "trailingComments: string expected"; + if (message.leadingDetachedComments != null && message.hasOwnProperty("leadingDetachedComments")) { + if (!Array.isArray(message.leadingDetachedComments)) + return "leadingDetachedComments: array expected"; + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + if (!$util.isString(message.leadingDetachedComments[i])) + return "leadingDetachedComments: string[] expected"; } - } - return message; - }; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + return null; + }; - /** - * Verifies an EnumValueDescriptorProto message. - * @function verify - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumValueDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.number != null && message.hasOwnProperty("number")) - if (!$util.isInteger(message.number)) - return "number: integer expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.EnumValueOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.SourceCodeInfo.Location} Location + */ + Location.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo.Location) + return object; + var message = new $root.google.protobuf.SourceCodeInfo.Location(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.span) { + if (!Array.isArray(object.span)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.span: array expected"); + message.span = []; + for (var i = 0; i < object.span.length; ++i) + message.span[i] = object.span[i] | 0; + } + if (object.leadingComments != null) + message.leadingComments = String(object.leadingComments); + if (object.trailingComments != null) + message.trailingComments = String(object.trailingComments); + if (object.leadingDetachedComments) { + if (!Array.isArray(object.leadingDetachedComments)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.leadingDetachedComments: array expected"); + message.leadingDetachedComments = []; + for (var i = 0; i < object.leadingDetachedComments.length; ++i) + message.leadingDetachedComments[i] = String(object.leadingDetachedComments[i]); + } + return message; + }; - /** - * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - */ - EnumValueDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumValueDescriptorProto) + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.Location} message Location + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Location.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.path = []; + object.span = []; + object.leadingDetachedComments = []; + } + if (options.defaults) { + object.leadingComments = ""; + object.trailingComments = ""; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.span && message.span.length) { + object.span = []; + for (var j = 0; j < message.span.length; ++j) + object.span[j] = message.span[j]; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + object.leadingComments = message.leadingComments; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + object.trailingComments = message.trailingComments; + if (message.leadingDetachedComments && message.leadingDetachedComments.length) { + object.leadingDetachedComments = []; + for (var j = 0; j < message.leadingDetachedComments.length; ++j) + object.leadingDetachedComments[j] = message.leadingDetachedComments[j]; + } return object; - var message = new 
$root.google.protobuf.EnumValueDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.number != null) - message.number = object.number | 0; - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.EnumValueOptions.fromObject(object.options); - } - return message; - }; + }; - /** - * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.EnumValueDescriptorProto} message EnumValueDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumValueDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.number = 0; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.number != null && message.hasOwnProperty("number")) - object.number = message.number; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.EnumValueOptions.toObject(message.options, options); - return object; - }; + /** + * Converts this Location to JSON. + * @function toJSON + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + * @returns {Object.} JSON object + */ + Location.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this EnumValueDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - EnumValueDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + return Location; + })(); - return EnumValueDescriptorProto; + return SourceCodeInfo; })(); - protobuf.ServiceDescriptorProto = (function() { + protobuf.GeneratedCodeInfo = (function() { /** - * Properties of a ServiceDescriptorProto. + * Properties of a GeneratedCodeInfo. * @memberof google.protobuf - * @interface IServiceDescriptorProto - * @property {string|null} [name] ServiceDescriptorProto name - * @property {Array.|null} [method] ServiceDescriptorProto method - * @property {google.protobuf.IServiceOptions|null} [options] ServiceDescriptorProto options + * @interface IGeneratedCodeInfo + * @property {Array.|null} [annotation] GeneratedCodeInfo annotation */ /** - * Constructs a new ServiceDescriptorProto. + * Constructs a new GeneratedCodeInfo. * @memberof google.protobuf - * @classdesc Represents a ServiceDescriptorProto. - * @implements IServiceDescriptorProto + * @classdesc Represents a GeneratedCodeInfo. + * @implements IGeneratedCodeInfo * @constructor - * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set */ - function ServiceDescriptorProto(properties) { - this.method = []; + function GeneratedCodeInfo(properties) { + this.annotation = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -14762,104 +22467,78 @@ } /** - * ServiceDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - */ - ServiceDescriptorProto.prototype.name = ""; - - /** - * ServiceDescriptorProto method. 
- * @member {Array.} method - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - */ - ServiceDescriptorProto.prototype.method = $util.emptyArray; - - /** - * ServiceDescriptorProto options. - * @member {google.protobuf.IServiceOptions|null|undefined} options - * @memberof google.protobuf.ServiceDescriptorProto + * GeneratedCodeInfo annotation. + * @member {Array.} annotation + * @memberof google.protobuf.GeneratedCodeInfo * @instance */ - ServiceDescriptorProto.prototype.options = null; + GeneratedCodeInfo.prototype.annotation = $util.emptyArray; /** - * Creates a new ServiceDescriptorProto instance using the specified properties. + * Creates a new GeneratedCodeInfo instance using the specified properties. * @function create - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static - * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto instance + * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo instance */ - ServiceDescriptorProto.create = function create(properties) { - return new ServiceDescriptorProto(properties); + GeneratedCodeInfo.create = function create(properties) { + return new GeneratedCodeInfo(properties); }; /** - * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static - * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ServiceDescriptorProto.encode = function encode(message, writer) { + GeneratedCodeInfo.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.method != null && message.method.length) - for (var i = 0; i < message.method.length; ++i) - $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.annotation != null && message.annotation.length) + for (var i = 0; i < message.annotation.length; ++i) + $root.google.protobuf.GeneratedCodeInfo.Annotation.encode(message.annotation[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); return writer; }; /** - * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static - * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ServiceDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + GeneratedCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceDescriptorProto.decode = function decode(reader, length) { + GeneratedCodeInfo.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.name = reader.string(); - break; - case 2: - if (!(message.method && message.method.length)) - message.method = []; - message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); + if (!(message.annotation && message.annotation.length)) + message.annotation = []; + message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -14870,473 +22549,404 @@ }; /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + GeneratedCodeInfo.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ServiceDescriptorProto message. + * Verifies a GeneratedCodeInfo message. 
* @function verify - * @memberof google.protobuf.ServiceDescriptorProto + * @memberof google.protobuf.GeneratedCodeInfo * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ServiceDescriptorProto.verify = function verify(message) { + GeneratedCodeInfo.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.method != null && message.hasOwnProperty("method")) { - if (!Array.isArray(message.method)) - return "method: array expected"; - for (var i = 0; i < message.method.length; ++i) { - var error = $root.google.protobuf.MethodDescriptorProto.verify(message.method[i]); + if (message.annotation != null && message.hasOwnProperty("annotation")) { + if (!Array.isArray(message.annotation)) + return "annotation: array expected"; + for (var i = 0; i < message.annotation.length; ++i) { + var error = $root.google.protobuf.GeneratedCodeInfo.Annotation.verify(message.annotation[i]); if (error) - return "method." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.ServiceOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; - - /** - * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto - */ - ServiceDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ServiceDescriptorProto) - return object; - var message = new $root.google.protobuf.ServiceDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.method) { - if (!Array.isArray(object.method)) - throw TypeError(".google.protobuf.ServiceDescriptorProto.method: array expected"); - message.method = []; - for (var i = 0; i < object.method.length; ++i) { - if (typeof object.method[i] !== "object") - throw TypeError(".google.protobuf.ServiceDescriptorProto.method: object expected"); - message.method[i] = $root.google.protobuf.MethodDescriptorProto.fromObject(object.method[i]); + return "annotation." + error; } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.ServiceDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.ServiceOptions.fromObject(object.options); - } - return message; - }; - - /** - * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {google.protobuf.ServiceDescriptorProto} message ServiceDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ServiceDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.method = []; - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.method && message.method.length) { - object.method = []; - for (var j = 0; j < message.method.length; ++j) - object.method[j] = $root.google.protobuf.MethodDescriptorProto.toObject(message.method[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.ServiceOptions.toObject(message.options, options); - return object; - }; - - /** - * Converts this ServiceDescriptorProto to JSON. - * @function toJSON - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - ServiceDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - return ServiceDescriptorProto; - })(); - - protobuf.MethodDescriptorProto = (function() { - - /** - * Properties of a MethodDescriptorProto. 
- * @memberof google.protobuf - * @interface IMethodDescriptorProto - * @property {string|null} [name] MethodDescriptorProto name - * @property {string|null} [inputType] MethodDescriptorProto inputType - * @property {string|null} [outputType] MethodDescriptorProto outputType - * @property {google.protobuf.IMethodOptions|null} [options] MethodDescriptorProto options - * @property {boolean|null} [clientStreaming] MethodDescriptorProto clientStreaming - * @property {boolean|null} [serverStreaming] MethodDescriptorProto serverStreaming - */ - - /** - * Constructs a new MethodDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents a MethodDescriptorProto. - * @implements IMethodDescriptorProto - * @constructor - * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set - */ - function MethodDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * MethodDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.name = ""; - - /** - * MethodDescriptorProto inputType. - * @member {string} inputType - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.inputType = ""; - - /** - * MethodDescriptorProto outputType. - * @member {string} outputType - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.outputType = ""; - - /** - * MethodDescriptorProto options. - * @member {google.protobuf.IMethodOptions|null|undefined} options - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.options = null; - - /** - * MethodDescriptorProto clientStreaming. 
- * @member {boolean} clientStreaming - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.clientStreaming = false; - - /** - * MethodDescriptorProto serverStreaming. - * @member {boolean} serverStreaming - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.serverStreaming = false; - - /** - * Creates a new MethodDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto instance - */ - MethodDescriptorProto.create = function create(properties) { - return new MethodDescriptorProto(properties); + } + return null; }; /** - * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.MethodDescriptorProto + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo * @static - * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo */ - MethodDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); - if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) - writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); - if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) - writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); - return writer; + GeneratedCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo(); + if (object.annotation) { + if (!Array.isArray(object.annotation)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: array expected"); + message.annotation = []; + for (var i = 0; i < object.annotation.length; ++i) { + if (typeof 
object.annotation[i] !== "object") + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: object expected"); + message.annotation[i] = $root.google.protobuf.GeneratedCodeInfo.Annotation.fromObject(object.annotation[i]); + } + } + return message; }; /** - * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.MethodDescriptorProto + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo * @static - * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer + * @param {google.protobuf.GeneratedCodeInfo} message GeneratedCodeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object */ - MethodDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); + GeneratedCodeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.annotation = []; + if (message.annotation && message.annotation.length) { + object.annotation = []; + for (var j = 0; j < message.annotation.length; ++j) + object.annotation[j] = $root.google.protobuf.GeneratedCodeInfo.Annotation.toObject(message.annotation[j], options); + } + return object; }; /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * Converts this GeneratedCodeInfo to JSON. + * @function toJSON + * @memberof google.protobuf.GeneratedCodeInfo + * @instance + * @returns {Object.} JSON object */ - MethodDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.inputType = reader.string(); - break; - case 3: - message.outputType = reader.string(); - break; - case 4: - message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); - break; - case 5: - message.clientStreaming = reader.bool(); - break; - case 6: - message.serverStreaming = reader.bool(); - break; - default: - reader.skipType(tag & 7); - break; + GeneratedCodeInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + GeneratedCodeInfo.Annotation = (function() { + + /** + * Properties of an Annotation. + * @memberof google.protobuf.GeneratedCodeInfo + * @interface IAnnotation + * @property {Array.|null} [path] Annotation path + * @property {string|null} [sourceFile] Annotation sourceFile + * @property {number|null} [begin] Annotation begin + * @property {number|null} [end] Annotation end + */ + + /** + * Constructs a new Annotation. 
+ * @memberof google.protobuf.GeneratedCodeInfo + * @classdesc Represents an Annotation. + * @implements IAnnotation + * @constructor + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set + */ + function Annotation(properties) { + this.path = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Annotation path. + * @member {Array.} path + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.path = $util.emptyArray; + + /** + * Annotation sourceFile. + * @member {string} sourceFile + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.sourceFile = ""; + + /** + * Annotation begin. + * @member {number} begin + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.begin = 0; + + /** + * Annotation end. + * @member {number} end + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.end = 0; + + /** + * Creates a new Annotation instance using the specified properties. + * @function create + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation instance + */ + Annotation.create = function create(properties) { + return new Annotation(properties); + }; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.path != null && message.path.length) { + writer.uint32(/* id 1, wireType 2 =*/10).fork(); + for (var i = 0; i < message.path.length; ++i) + writer.int32(message.path[i]); + writer.ldelim(); + } + if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); + if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); + return writer; + }; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Annotation message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } } - } - return message; - }; + return message; + }; - /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MethodDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * Verifies a MethodDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - MethodDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.inputType != null && message.hasOwnProperty("inputType")) - if (!$util.isString(message.inputType)) - return "inputType: string expected"; - if (message.outputType != null && message.hasOwnProperty("outputType")) - if (!$util.isString(message.outputType)) - return "outputType: string expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.MethodOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) - if (typeof message.clientStreaming !== "boolean") - return "clientStreaming: boolean expected"; - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) - if (typeof message.serverStreaming !== "boolean") - return "serverStreaming: boolean expected"; - return null; - }; + /** + * Verifies an Annotation message. 
+ * @function verify + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Annotation.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + if (!$util.isString(message.sourceFile)) + return "sourceFile: string expected"; + if (message.begin != null && message.hasOwnProperty("begin")) + if (!$util.isInteger(message.begin)) + return "begin: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; - /** - * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - */ - MethodDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MethodDescriptorProto) + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + */ + Annotation.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo.Annotation) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.Annotation.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.sourceFile != null) + message.sourceFile = String(object.sourceFile); + if (object.begin != null) + message.begin = object.begin | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.Annotation} message Annotation + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Annotation.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.path = []; + if (options.defaults) { + object.sourceFile = ""; + object.begin = 0; + object.end = 0; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + object.sourceFile = message.sourceFile; + if (message.begin != null && message.hasOwnProperty("begin")) + object.begin = message.begin; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; return object; - var message = new $root.google.protobuf.MethodDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.inputType != null) - message.inputType = String(object.inputType); - if (object.outputType != null) - message.outputType = String(object.outputType); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.MethodOptions.fromObject(object.options); - } - if (object.clientStreaming != null) - message.clientStreaming = Boolean(object.clientStreaming); - if (object.serverStreaming != null) - message.serverStreaming = Boolean(object.serverStreaming); - return message; - }; + }; - /** - * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.MethodDescriptorProto} message MethodDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - MethodDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.inputType = ""; - object.outputType = ""; - object.options = null; - object.clientStreaming = false; - object.serverStreaming = false; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.inputType != null && message.hasOwnProperty("inputType")) - object.inputType = message.inputType; - if (message.outputType != null && message.hasOwnProperty("outputType")) - object.outputType = message.outputType; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.MethodOptions.toObject(message.options, options); - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) - object.clientStreaming = message.clientStreaming; - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) - object.serverStreaming = message.serverStreaming; - return object; - }; + /** + * Converts this Annotation to JSON. + * @function toJSON + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + * @returns {Object.} JSON object + */ + Annotation.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this MethodDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.MethodDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - MethodDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + return Annotation; + })(); - return MethodDescriptorProto; + return GeneratedCodeInfo; })(); - protobuf.FileOptions = (function() { + protobuf.Timestamp = (function() { /** - * Properties of a FileOptions. + * Properties of a Timestamp. * @memberof google.protobuf - * @interface IFileOptions - * @property {string|null} [javaPackage] FileOptions javaPackage - * @property {string|null} [javaOuterClassname] FileOptions javaOuterClassname - * @property {boolean|null} [javaMultipleFiles] FileOptions javaMultipleFiles - * @property {boolean|null} [javaGenerateEqualsAndHash] FileOptions javaGenerateEqualsAndHash - * @property {boolean|null} [javaStringCheckUtf8] FileOptions javaStringCheckUtf8 - * @property {google.protobuf.FileOptions.OptimizeMode|null} [optimizeFor] FileOptions optimizeFor - * @property {string|null} [goPackage] FileOptions goPackage - * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices - * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices - * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices - * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices - * @property {boolean|null} [deprecated] FileOptions deprecated - * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas - * @property {string|null} [objcClassPrefix] FileOptions objcClassPrefix - * @property {string|null} [csharpNamespace] FileOptions csharpNamespace - * @property {string|null} [swiftPrefix] FileOptions swiftPrefix - * @property {string|null} [phpClassPrefix] FileOptions phpClassPrefix - * @property {string|null} [phpNamespace] FileOptions phpNamespace - * @property {string|null} [phpMetadataNamespace] 
FileOptions phpMetadataNamespace - * @property {string|null} [rubyPackage] FileOptions rubyPackage - * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption - * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos */ /** - * Constructs a new FileOptions. + * Constructs a new Timestamp. * @memberof google.protobuf - * @classdesc Represents a FileOptions. - * @implements IFileOptions + * @classdesc Represents a Timestamp. + * @implements ITimestamp * @constructor - * @param {google.protobuf.IFileOptions=} [properties] Properties to set + * @param {google.protobuf.ITimestamp=} [properties] Properties to set */ - function FileOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.resourceDefinition"] = []; + function Timestamp(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -15344,354 +22954,298 @@ } /** - * FileOptions javaPackage. - * @member {string} javaPackage - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaPackage = ""; - - /** - * FileOptions javaOuterClassname. - * @member {string} javaOuterClassname - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaOuterClassname = ""; - - /** - * FileOptions javaMultipleFiles. - * @member {boolean} javaMultipleFiles - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaMultipleFiles = false; - - /** - * FileOptions javaGenerateEqualsAndHash. - * @member {boolean} javaGenerateEqualsAndHash - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaGenerateEqualsAndHash = false; - - /** - * FileOptions javaStringCheckUtf8. 
- * @member {boolean} javaStringCheckUtf8 - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaStringCheckUtf8 = false; - - /** - * FileOptions optimizeFor. - * @member {google.protobuf.FileOptions.OptimizeMode} optimizeFor - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.optimizeFor = 1; - - /** - * FileOptions goPackage. - * @member {string} goPackage - * @memberof google.protobuf.FileOptions + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp * @instance */ - FileOptions.prototype.goPackage = ""; + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * FileOptions ccGenericServices. - * @member {boolean} ccGenericServices - * @memberof google.protobuf.FileOptions + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp * @instance */ - FileOptions.prototype.ccGenericServices = false; + Timestamp.prototype.nanos = 0; /** - * FileOptions javaGenericServices. - * @member {boolean} javaGenericServices - * @memberof google.protobuf.FileOptions - * @instance + * Creates a new Timestamp instance using the specified properties. + * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance */ - FileOptions.prototype.javaGenericServices = false; + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; /** - * FileOptions pyGenericServices. - * @member {boolean} pyGenericServices - * @memberof google.protobuf.FileOptions - * @instance + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - FileOptions.prototype.pyGenericServices = false; + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; /** - * FileOptions phpGenericServices. - * @member {boolean} phpGenericServices - * @memberof google.protobuf.FileOptions - * @instance + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - FileOptions.prototype.phpGenericServices = false; + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; /** - * FileOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.FileOptions - * @instance + * Decodes a Timestamp message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileOptions.prototype.deprecated = false; + Timestamp.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = reader.int64(); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; /** - * FileOptions ccEnableArenas. - * @member {boolean} ccEnableArenas - * @memberof google.protobuf.FileOptions - * @instance + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileOptions.prototype.ccEnableArenas = true; + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; /** - * FileOptions objcClassPrefix. - * @member {string} objcClassPrefix - * @memberof google.protobuf.FileOptions - * @instance + * Verifies a Timestamp message. 
+ * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - FileOptions.prototype.objcClassPrefix = ""; + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; /** - * FileOptions csharpNamespace. - * @member {string} csharpNamespace - * @memberof google.protobuf.FileOptions - * @instance + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp */ - FileOptions.prototype.csharpNamespace = ""; + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; /** - * FileOptions swiftPrefix. - * @member {string} swiftPrefix - * @memberof google.protobuf.FileOptions - * @instance + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object */ - FileOptions.prototype.swiftPrefix = ""; + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? 
"0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; /** - * FileOptions phpClassPrefix. - * @member {string} phpClassPrefix - * @memberof google.protobuf.FileOptions + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp * @instance + * @returns {Object.} JSON object */ - FileOptions.prototype.phpClassPrefix = ""; + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * FileOptions phpNamespace. - * @member {string} phpNamespace - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpNamespace = ""; + return Timestamp; + })(); - /** - * FileOptions phpMetadataNamespace. - * @member {string} phpMetadataNamespace - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpMetadataNamespace = ""; + protobuf.DoubleValue = (function() { /** - * FileOptions rubyPackage. - * @member {string} rubyPackage - * @memberof google.protobuf.FileOptions - * @instance + * Properties of a DoubleValue. + * @memberof google.protobuf + * @interface IDoubleValue + * @property {number|null} [value] DoubleValue value */ - FileOptions.prototype.rubyPackage = ""; /** - * FileOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.FileOptions - * @instance + * Constructs a new DoubleValue. 
+ * @memberof google.protobuf + * @classdesc Represents a DoubleValue. + * @implements IDoubleValue + * @constructor + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set */ - FileOptions.prototype.uninterpretedOption = $util.emptyArray; + function DoubleValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } /** - * FileOptions .google.api.resourceDefinition. - * @member {Array.} .google.api.resourceDefinition - * @memberof google.protobuf.FileOptions + * DoubleValue value. + * @member {number} value + * @memberof google.protobuf.DoubleValue * @instance */ - FileOptions.prototype[".google.api.resourceDefinition"] = $util.emptyArray; + DoubleValue.prototype.value = 0; /** - * Creates a new FileOptions instance using the specified properties. + * Creates a new DoubleValue instance using the specified properties. * @function create - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.DoubleValue * @static - * @param {google.protobuf.IFileOptions=} [properties] Properties to set - * @returns {google.protobuf.FileOptions} FileOptions instance + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + * @returns {google.protobuf.DoubleValue} DoubleValue instance */ - FileOptions.create = function create(properties) { - return new FileOptions(properties); + DoubleValue.create = function create(properties) { + return new DoubleValue(properties); }; /** - * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.DoubleValue * @static - * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileOptions.encode = function encode(message, writer) { + DoubleValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); - if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) - writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); - if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); - if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) - writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); - if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) - writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); - if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) - writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); - if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) - writer.uint32(/* id 17, wireType 0 =*/136).bool(message.javaGenericServices); - if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, "pyGenericServices")) - writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); - if (message.javaGenerateEqualsAndHash != null && 
Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) - writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); - if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) - writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); - if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) - writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); - if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) - writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); - if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) - writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); - if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) - writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); - if (message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) - writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); - if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) - writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); - if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) - writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); - if (message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) - writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); - if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) - writer.uint32(/* id 
45, wireType 2 =*/362).string(message.rubyPackage); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resourceDefinition"] != null && message[".google.api.resourceDefinition"].length) - for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) - $root.google.api.ResourceDescriptor.encode(message[".google.api.resourceDefinition"][i], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); return writer; }; /** - * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.DoubleValue * @static - * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FileOptions.encodeDelimited = function encodeDelimited(message, writer) { + DoubleValue.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FileOptions message from the specified reader or buffer. + * Decodes a DoubleValue message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.DoubleValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileOptions} FileOptions + * @returns {google.protobuf.DoubleValue} DoubleValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileOptions.decode = function decode(reader, length) { + DoubleValue.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.javaPackage = reader.string(); - break; - case 8: - message.javaOuterClassname = reader.string(); - break; - case 10: - message.javaMultipleFiles = reader.bool(); - break; - case 20: - message.javaGenerateEqualsAndHash = reader.bool(); - break; - case 27: - message.javaStringCheckUtf8 = reader.bool(); - break; - case 9: - message.optimizeFor = reader.int32(); - break; - case 11: - message.goPackage = reader.string(); - break; - case 16: - message.ccGenericServices = reader.bool(); - break; - case 17: - message.javaGenericServices = reader.bool(); - break; - case 18: - message.pyGenericServices = reader.bool(); - break; - case 42: - message.phpGenericServices = reader.bool(); - break; - case 23: - message.deprecated = reader.bool(); - break; - case 31: - message.ccEnableArenas = reader.bool(); - break; - case 36: - message.objcClassPrefix = reader.string(); - break; - case 37: - message.csharpNamespace = reader.string(); - break; - case 39: - message.swiftPrefix = 
reader.string(); - break; - case 40: - message.phpClassPrefix = reader.string(); - break; - case 41: - message.phpNamespace = reader.string(); - break; - case 44: - message.phpMetadataNamespace = reader.string(); - break; - case 45: - message.rubyPackage = reader.string(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1053: - if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) - message[".google.api.resourceDefinition"] = []; - message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); + message.value = reader.double(); break; default: reader.skipType(tag & 7); @@ -15702,352 +23256,294 @@ }; /** - * Decodes a FileOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FileOptions + * Decodes a DoubleValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DoubleValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DoubleValue} DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DoubleValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DoubleValue message. 
+ * @function verify + * @memberof google.protobuf.DoubleValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DoubleValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "number") + return "value: number expected"; + return null; + }; + + /** + * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DoubleValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DoubleValue} DoubleValue + */ + DoubleValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DoubleValue) + return object; + var message = new $root.google.protobuf.DoubleValue(); + if (object.value != null) + message.value = Number(object.value); + return message; + }; + + /** + * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.DoubleValue} message DoubleValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DoubleValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; + return object; + }; + + /** + * Converts this DoubleValue to JSON. 
+ * @function toJSON + * @memberof google.protobuf.DoubleValue + * @instance + * @returns {Object.} JSON object + */ + DoubleValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return DoubleValue; + })(); + + protobuf.FloatValue = (function() { + + /** + * Properties of a FloatValue. + * @memberof google.protobuf + * @interface IFloatValue + * @property {number|null} [value] FloatValue value + */ + + /** + * Constructs a new FloatValue. + * @memberof google.protobuf + * @classdesc Represents a FloatValue. + * @implements IFloatValue + * @constructor + * @param {google.protobuf.IFloatValue=} [properties] Properties to set + */ + function FloatValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FloatValue value. + * @member {number} value + * @memberof google.protobuf.FloatValue + * @instance + */ + FloatValue.prototype.value = 0; + + /** + * Creates a new FloatValue instance using the specified properties. + * @function create + * @memberof google.protobuf.FloatValue * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileOptions} FileOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing + * @param {google.protobuf.IFloatValue=} [properties] Properties to set + * @returns {google.protobuf.FloatValue} FloatValue instance */ - FileOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); + FloatValue.create = function create(properties) { + return new FloatValue(properties); }; /** - * Verifies a FileOptions message. 
- * @function verify - * @memberof google.protobuf.FileOptions + * Encodes the specified FloatValue message. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FloatValue * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not + * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - FileOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) - if (!$util.isString(message.javaPackage)) - return "javaPackage: string expected"; - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) - if (!$util.isString(message.javaOuterClassname)) - return "javaOuterClassname: string expected"; - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) - if (typeof message.javaMultipleFiles !== "boolean") - return "javaMultipleFiles: boolean expected"; - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) - if (typeof message.javaGenerateEqualsAndHash !== "boolean") - return "javaGenerateEqualsAndHash: boolean expected"; - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) - if (typeof message.javaStringCheckUtf8 !== "boolean") - return "javaStringCheckUtf8: boolean expected"; - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) - switch (message.optimizeFor) { - default: - return "optimizeFor: enum value expected"; + FloatValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && 
Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 5 =*/13).float(message.value); + return writer; + }; + + /** + * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FloatValue + * @static + * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FloatValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FloatValue message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FloatValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FloatValue} FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FloatValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FloatValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { case 1: - case 2: - case 3: + message.value = reader.float(); + break; + default: + reader.skipType(tag & 7); break; - } - if (message.goPackage != null && message.hasOwnProperty("goPackage")) - if (!$util.isString(message.goPackage)) - return "goPackage: string expected"; - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) - if (typeof message.ccGenericServices !== "boolean") - return "ccGenericServices: boolean expected"; - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) - if (typeof message.javaGenericServices !== "boolean") - return "javaGenericServices: boolean expected"; - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) - if (typeof message.pyGenericServices !== "boolean") - return "pyGenericServices: boolean expected"; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - if (typeof message.phpGenericServices !== "boolean") - return "phpGenericServices: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) - if (typeof message.ccEnableArenas !== "boolean") - return "ccEnableArenas: boolean expected"; - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) - if (!$util.isString(message.objcClassPrefix)) - return "objcClassPrefix: string expected"; - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) - if (!$util.isString(message.csharpNamespace)) - return "csharpNamespace: string expected"; - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) - if 
(!$util.isString(message.swiftPrefix)) - return "swiftPrefix: string expected"; - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) - if (!$util.isString(message.phpClassPrefix)) - return "phpClassPrefix: string expected"; - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) - if (!$util.isString(message.phpNamespace)) - return "phpNamespace: string expected"; - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) - if (!$util.isString(message.phpMetadataNamespace)) - return "phpMetadataNamespace: string expected"; - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) - if (!$util.isString(message.rubyPackage)) - return "rubyPackage: string expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.resourceDefinition"] != null && message.hasOwnProperty(".google.api.resourceDefinition")) { - if (!Array.isArray(message[".google.api.resourceDefinition"])) - return ".google.api.resourceDefinition: array expected"; - for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) { - var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resourceDefinition"][i]); - if (error) - return ".google.api.resourceDefinition." + error; } } - return null; + return message; }; /** - * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.FileOptions + * Decodes a FloatValue message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FloatValue * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FileOptions} FileOptions - */ - FileOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileOptions) - return object; - var message = new $root.google.protobuf.FileOptions(); - if (object.javaPackage != null) - message.javaPackage = String(object.javaPackage); - if (object.javaOuterClassname != null) - message.javaOuterClassname = String(object.javaOuterClassname); - if (object.javaMultipleFiles != null) - message.javaMultipleFiles = Boolean(object.javaMultipleFiles); - if (object.javaGenerateEqualsAndHash != null) - message.javaGenerateEqualsAndHash = Boolean(object.javaGenerateEqualsAndHash); - if (object.javaStringCheckUtf8 != null) - message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); - switch (object.optimizeFor) { - case "SPEED": - case 1: - message.optimizeFor = 1; - break; - case "CODE_SIZE": - case 2: - message.optimizeFor = 2; - break; - case "LITE_RUNTIME": - case 3: - message.optimizeFor = 3; - break; - } - if (object.goPackage != null) - message.goPackage = String(object.goPackage); - if (object.ccGenericServices != null) - message.ccGenericServices = Boolean(object.ccGenericServices); - if (object.javaGenericServices != null) - message.javaGenericServices = Boolean(object.javaGenericServices); - if (object.pyGenericServices != null) - message.pyGenericServices = Boolean(object.pyGenericServices); - if (object.phpGenericServices != null) - message.phpGenericServices = Boolean(object.phpGenericServices); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.ccEnableArenas != null) - message.ccEnableArenas = Boolean(object.ccEnableArenas); - if (object.objcClassPrefix != null) - message.objcClassPrefix = String(object.objcClassPrefix); - if (object.csharpNamespace != null) - message.csharpNamespace = 
String(object.csharpNamespace); - if (object.swiftPrefix != null) - message.swiftPrefix = String(object.swiftPrefix); - if (object.phpClassPrefix != null) - message.phpClassPrefix = String(object.phpClassPrefix); - if (object.phpNamespace != null) - message.phpNamespace = String(object.phpNamespace); - if (object.phpMetadataNamespace != null) - message.phpMetadataNamespace = String(object.phpMetadataNamespace); - if (object.rubyPackage != null) - message.rubyPackage = String(object.rubyPackage); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.resourceDefinition"]) { - if (!Array.isArray(object[".google.api.resourceDefinition"])) - throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: array expected"); - message[".google.api.resourceDefinition"] = []; - for (var i = 0; i < object[".google.api.resourceDefinition"].length; ++i) { - if (typeof object[".google.api.resourceDefinition"][i] !== "object") - throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: object expected"); - message[".google.api.resourceDefinition"][i] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resourceDefinition"][i]); - } - } + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FloatValue} FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + 
FloatValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FloatValue message. + * @function verify + * @memberof google.protobuf.FloatValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FloatValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "number") + return "value: number expected"; + return null; + }; + + /** + * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FloatValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FloatValue} FloatValue + */ + FloatValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FloatValue) + return object; + var message = new $root.google.protobuf.FloatValue(); + if (object.value != null) + message.value = Number(object.value); return message; }; /** - * Creates a plain object from a FileOptions message. Also converts values to other types if specified. + * Creates a plain object from a FloatValue message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.FloatValue * @static - * @param {google.protobuf.FileOptions} message FileOptions + * @param {google.protobuf.FloatValue} message FloatValue * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FileOptions.toObject = function toObject(message, options) { + FloatValue.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.resourceDefinition"] = []; - } - if (options.defaults) { - object.javaPackage = ""; - object.javaOuterClassname = ""; - object.optimizeFor = options.enums === String ? "SPEED" : 1; - object.javaMultipleFiles = false; - object.goPackage = ""; - object.ccGenericServices = false; - object.javaGenericServices = false; - object.pyGenericServices = false; - object.javaGenerateEqualsAndHash = false; - object.deprecated = false; - object.javaStringCheckUtf8 = false; - object.ccEnableArenas = true; - object.objcClassPrefix = ""; - object.csharpNamespace = ""; - object.swiftPrefix = ""; - object.phpClassPrefix = ""; - object.phpNamespace = ""; - object.phpGenericServices = false; - object.phpMetadataNamespace = ""; - object.rubyPackage = ""; - } - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) - object.javaPackage = message.javaPackage; - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) - object.javaOuterClassname = message.javaOuterClassname; - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) - object.optimizeFor = options.enums === String ? 
$root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) - object.javaMultipleFiles = message.javaMultipleFiles; - if (message.goPackage != null && message.hasOwnProperty("goPackage")) - object.goPackage = message.goPackage; - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) - object.ccGenericServices = message.ccGenericServices; - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) - object.javaGenericServices = message.javaGenericServices; - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) - object.pyGenericServices = message.pyGenericServices; - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) - object.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) - object.javaStringCheckUtf8 = message.javaStringCheckUtf8; - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) - object.ccEnableArenas = message.ccEnableArenas; - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) - object.objcClassPrefix = message.objcClassPrefix; - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) - object.csharpNamespace = message.csharpNamespace; - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) - object.swiftPrefix = message.swiftPrefix; - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) - object.phpClassPrefix = message.phpClassPrefix; - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) - object.phpNamespace = 
message.phpNamespace; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - object.phpGenericServices = message.phpGenericServices; - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) - object.phpMetadataNamespace = message.phpMetadataNamespace; - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) - object.rubyPackage = message.rubyPackage; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length) { - object[".google.api.resourceDefinition"] = []; - for (var j = 0; j < message[".google.api.resourceDefinition"].length; ++j) - object[".google.api.resourceDefinition"][j] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resourceDefinition"][j], options); - } + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; return object; }; /** - * Converts this FileOptions to JSON. + * Converts this FloatValue to JSON. * @function toJSON - * @memberof google.protobuf.FileOptions + * @memberof google.protobuf.FloatValue * @instance * @returns {Object.} JSON object */ - FileOptions.prototype.toJSON = function toJSON() { + FloatValue.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - /** - * OptimizeMode enum. 
- * @name google.protobuf.FileOptions.OptimizeMode - * @enum {number} - * @property {number} SPEED=1 SPEED value - * @property {number} CODE_SIZE=2 CODE_SIZE value - * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value - */ - FileOptions.OptimizeMode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "SPEED"] = 1; - values[valuesById[2] = "CODE_SIZE"] = 2; - values[valuesById[3] = "LITE_RUNTIME"] = 3; - return values; - })(); - - return FileOptions; + return FloatValue; })(); - protobuf.MessageOptions = (function() { + protobuf.Int64Value = (function() { /** - * Properties of a MessageOptions. + * Properties of an Int64Value. * @memberof google.protobuf - * @interface IMessageOptions - * @property {boolean|null} [messageSetWireFormat] MessageOptions messageSetWireFormat - * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor - * @property {boolean|null} [deprecated] MessageOptions deprecated - * @property {boolean|null} [mapEntry] MessageOptions mapEntry - * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption - * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource + * @interface IInt64Value + * @property {number|Long|null} [value] Int64Value value */ /** - * Constructs a new MessageOptions. + * Constructs a new Int64Value. * @memberof google.protobuf - * @classdesc Represents a MessageOptions. - * @implements IMessageOptions + * @classdesc Represents an Int64Value. 
+ * @implements IInt64Value * @constructor - * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + * @param {google.protobuf.IInt64Value=} [properties] Properties to set */ - function MessageOptions(properties) { - this.uninterpretedOption = []; + function Int64Value(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -16055,143 +23551,276 @@ } /** - * MessageOptions messageSetWireFormat. - * @member {boolean} messageSetWireFormat - * @memberof google.protobuf.MessageOptions + * Int64Value value. + * @member {number|Long} value + * @memberof google.protobuf.Int64Value * @instance */ - MessageOptions.prototype.messageSetWireFormat = false; + Int64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * MessageOptions noStandardDescriptorAccessor. - * @member {boolean} noStandardDescriptorAccessor - * @memberof google.protobuf.MessageOptions - * @instance + * Creates a new Int64Value instance using the specified properties. + * @function create + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value=} [properties] Properties to set + * @returns {google.protobuf.Int64Value} Int64Value instance */ - MessageOptions.prototype.noStandardDescriptorAccessor = false; + Int64Value.create = function create(properties) { + return new Int64Value(properties); + }; /** - * MessageOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.MessageOptions + * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int64Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.value); + return writer; + }; + + /** + * Encodes the specified Int64Value message, length delimited. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int64Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Int64Value message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Int64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Int64Value} Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int64Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Int64Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.value = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Int64Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Int64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Int64Value} Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int64Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Int64Value message. + * @function verify + * @memberof google.protobuf.Int64Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Int64Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) + return "value: integer|Long expected"; + return null; + }; + + /** + * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Int64Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Int64Value} Int64Value + */ + Int64Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Int64Value) + return object; + var message = new $root.google.protobuf.Int64Value(); + if (object.value != null) + if ($util.Long) + (message.value = $util.Long.fromValue(object.value)).unsigned = false; + else if (typeof object.value === "string") + message.value = parseInt(object.value, 10); + else if (typeof object.value === "number") + message.value = object.value; + else if (typeof object.value === "object") + message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an Int64Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.Int64Value} message Int64Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Int64Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.value = options.longs === String ? "0" : 0; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value === "number") + object.value = options.longs === String ? String(message.value) : message.value; + else + object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? 
new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber() : message.value; + return object; + }; + + /** + * Converts this Int64Value to JSON. + * @function toJSON + * @memberof google.protobuf.Int64Value * @instance + * @returns {Object.} JSON object */ - MessageOptions.prototype.deprecated = false; + Int64Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Int64Value; + })(); + + protobuf.UInt64Value = (function() { /** - * MessageOptions mapEntry. - * @member {boolean} mapEntry - * @memberof google.protobuf.MessageOptions - * @instance + * Properties of a UInt64Value. + * @memberof google.protobuf + * @interface IUInt64Value + * @property {number|Long|null} [value] UInt64Value value */ - MessageOptions.prototype.mapEntry = false; /** - * MessageOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.MessageOptions - * @instance + * Constructs a new UInt64Value. + * @memberof google.protobuf + * @classdesc Represents a UInt64Value. + * @implements IUInt64Value + * @constructor + * @param {google.protobuf.IUInt64Value=} [properties] Properties to set */ - MessageOptions.prototype.uninterpretedOption = $util.emptyArray; + function UInt64Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } /** - * MessageOptions .google.api.resource. - * @member {google.api.IResourceDescriptor|null|undefined} .google.api.resource - * @memberof google.protobuf.MessageOptions + * UInt64Value value. + * @member {number|Long} value + * @memberof google.protobuf.UInt64Value * @instance */ - MessageOptions.prototype[".google.api.resource"] = null; + UInt64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,true) : 0; /** - * Creates a new MessageOptions instance using the specified properties. 
+ * Creates a new UInt64Value instance using the specified properties. * @function create - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static - * @param {google.protobuf.IMessageOptions=} [properties] Properties to set - * @returns {google.protobuf.MessageOptions} MessageOptions instance + * @param {google.protobuf.IUInt64Value=} [properties] Properties to set + * @returns {google.protobuf.UInt64Value} UInt64Value instance */ - MessageOptions.create = function create(properties) { - return new MessageOptions(properties); + UInt64Value.create = function create(properties) { + return new UInt64Value(properties); }; /** - * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. * @function encode - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static - * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - MessageOptions.encode = function encode(message, writer) { + UInt64Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); - if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - 
writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) - writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, ".google.api.resource")) - $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).uint64(message.value); return writer; }; /** - * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static - * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - MessageOptions.encodeDelimited = function encodeDelimited(message, writer) { + UInt64Value.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a MessageOptions message from the specified reader or buffer. + * Decodes a UInt64Value message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MessageOptions} MessageOptions + * @returns {google.protobuf.UInt64Value} UInt64Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MessageOptions.decode = function decode(reader, length) { + UInt64Value.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UInt64Value(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.messageSetWireFormat = reader.bool(); - break; - case 2: - message.noStandardDescriptorAccessor = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 7: - message.mapEntry = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1053: - message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); + message.value = reader.uint64(); break; default: reader.skipType(tag & 7); @@ -16202,375 +23831,197 @@ }; /** - * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * Decodes a UInt64Value message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MessageOptions} MessageOptions + * @returns {google.protobuf.UInt64Value} UInt64Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MessageOptions.decodeDelimited = function decodeDelimited(reader) { + UInt64Value.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a MessageOptions message. + * Verifies a UInt64Value message. * @function verify - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - MessageOptions.verify = function verify(message) { + UInt64Value.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) - if (typeof message.messageSetWireFormat !== "boolean") - return "messageSetWireFormat: boolean expected"; - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) - if (typeof message.noStandardDescriptorAccessor !== "boolean") - return "noStandardDescriptorAccessor: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) - if (typeof message.mapEntry !== "boolean") - return "mapEntry: boolean expected"; - if 
(message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) { - var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resource"]); - if (error) - return ".google.api.resource." + error; - } + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) + return "value: integer|Long expected"; return null; }; /** - * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. + * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static * @param {Object.} object Plain object - * @returns {google.protobuf.MessageOptions} MessageOptions + * @returns {google.protobuf.UInt64Value} UInt64Value */ - MessageOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MessageOptions) + UInt64Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UInt64Value) return object; - var message = new $root.google.protobuf.MessageOptions(); - if (object.messageSetWireFormat != null) - message.messageSetWireFormat = Boolean(object.messageSetWireFormat); - if (object.noStandardDescriptorAccessor != null) - message.noStandardDescriptorAccessor = Boolean(object.noStandardDescriptorAccessor); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.mapEntry != null) - message.mapEntry = Boolean(object.mapEntry); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.resource"] != null) { - if (typeof object[".google.api.resource"] !== "object") - throw TypeError(".google.protobuf.MessageOptions..google.api.resource: object expected"); - message[".google.api.resource"] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resource"]); - } + var message = new $root.google.protobuf.UInt64Value(); + if (object.value != null) + if ($util.Long) + (message.value = 
$util.Long.fromValue(object.value)).unsigned = true; + else if (typeof object.value === "string") + message.value = parseInt(object.value, 10); + else if (typeof object.value === "number") + message.value = object.value; + else if (typeof object.value === "object") + message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(true); return message; }; /** - * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @static - * @param {google.protobuf.MessageOptions} message MessageOptions + * @param {google.protobuf.UInt64Value} message UInt64Value * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - MessageOptions.toObject = function toObject(message, options) { + UInt64Value.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.messageSetWireFormat = false; - object.noStandardDescriptorAccessor = false; - object.deprecated = false; - object.mapEntry = false; - object[".google.api.resource"] = null; - } - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) - object.messageSetWireFormat = message.messageSetWireFormat; - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) - object.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) - object.mapEntry = message.mapEntry; - if 
(message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) - object[".google.api.resource"] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resource"], options); + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, true); + object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.value = options.longs === String ? "0" : 0; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value === "number") + object.value = options.longs === String ? String(message.value) : message.value; + else + object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber(true) : message.value; return object; }; /** - * Converts this MessageOptions to JSON. + * Converts this UInt64Value to JSON. * @function toJSON - * @memberof google.protobuf.MessageOptions + * @memberof google.protobuf.UInt64Value * @instance * @returns {Object.} JSON object */ - MessageOptions.prototype.toJSON = function toJSON() { + UInt64Value.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return MessageOptions; + return UInt64Value; })(); - protobuf.FieldOptions = (function() { + protobuf.Int32Value = (function() { /** - * Properties of a FieldOptions. + * Properties of an Int32Value. 
* @memberof google.protobuf - * @interface IFieldOptions - * @property {google.protobuf.FieldOptions.CType|null} [ctype] FieldOptions ctype - * @property {boolean|null} [packed] FieldOptions packed - * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype - * @property {boolean|null} [lazy] FieldOptions lazy - * @property {boolean|null} [deprecated] FieldOptions deprecated - * @property {boolean|null} [weak] FieldOptions weak - * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption - * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior - * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference + * @interface IInt32Value + * @property {number|null} [value] Int32Value value */ /** - * Constructs a new FieldOptions. + * Constructs a new Int32Value. * @memberof google.protobuf - * @classdesc Represents a FieldOptions. - * @implements IFieldOptions + * @classdesc Represents an Int32Value. + * @implements IInt32Value * @constructor - * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + * @param {google.protobuf.IInt32Value=} [properties] Properties to set */ - function FieldOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.fieldBehavior"] = []; + function Int32Value(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) this[keys[i]] = properties[keys[i]]; - } - - /** - * FieldOptions ctype. - * @member {google.protobuf.FieldOptions.CType} ctype - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.ctype = 0; - - /** - * FieldOptions packed. - * @member {boolean} packed - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.packed = false; - - /** - * FieldOptions jstype. 
- * @member {google.protobuf.FieldOptions.JSType} jstype - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.jstype = 0; - - /** - * FieldOptions lazy. - * @member {boolean} lazy - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.lazy = false; - - /** - * FieldOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.deprecated = false; - - /** - * FieldOptions weak. - * @member {boolean} weak - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.weak = false; - - /** - * FieldOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * FieldOptions .google.api.fieldBehavior. - * @member {Array.} .google.api.fieldBehavior - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype[".google.api.fieldBehavior"] = $util.emptyArray; + } /** - * FieldOptions .google.api.resourceReference. - * @member {google.api.IResourceReference|null|undefined} .google.api.resourceReference - * @memberof google.protobuf.FieldOptions + * Int32Value value. + * @member {number} value + * @memberof google.protobuf.Int32Value * @instance */ - FieldOptions.prototype[".google.api.resourceReference"] = null; + Int32Value.prototype.value = 0; /** - * Creates a new FieldOptions instance using the specified properties. + * Creates a new Int32Value instance using the specified properties. 
* @function create - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static - * @param {google.protobuf.IFieldOptions=} [properties] Properties to set - * @returns {google.protobuf.FieldOptions} FieldOptions instance + * @param {google.protobuf.IInt32Value=} [properties] Properties to set + * @returns {google.protobuf.Int32Value} Int32Value instance */ - FieldOptions.create = function create(properties) { - return new FieldOptions(properties); + Int32Value.create = function create(properties) { + return new Int32Value(properties); }; /** - * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. * @function encode - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static - * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FieldOptions.encode = function encode(message, writer) { + Int32Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); - if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) - writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); - if (message.jstype != 
null && Object.hasOwnProperty.call(message, "jstype")) - writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); - if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) - writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { - writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); - for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) - writer.int32(message[".google.api.fieldBehavior"][i]); - writer.ldelim(); - } - if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) - $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.value); return writer; }; /** - * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static - * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - FieldOptions.encodeDelimited = function encodeDelimited(message, writer) { + Int32Value.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a FieldOptions message from the specified reader or buffer. + * Decodes an Int32Value message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FieldOptions} FieldOptions + * @returns {google.protobuf.Int32Value} Int32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldOptions.decode = function decode(reader, length) { + Int32Value.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Int32Value(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.ctype = reader.int32(); - break; - case 2: - message.packed = reader.bool(); - break; - case 6: - message.jstype = reader.int32(); - break; - case 5: - message.lazy = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 10: - message.weak = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1052: - if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) - message[".google.api.fieldBehavior"] = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message[".google.api.fieldBehavior"].push(reader.int32()); - } else - message[".google.api.fieldBehavior"].push(reader.int32()); - break; - case 1055: - message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); + message.value = reader.int32(); break; default: reader.skipType(tag & 7); @@ -16581,317 +24032,107 @@ }; /** - * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * Decodes an Int32Value message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FieldOptions} FieldOptions + * @returns {google.protobuf.Int32Value} Int32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldOptions.decodeDelimited = function decodeDelimited(reader) { + Int32Value.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a FieldOptions message. + * Verifies an Int32Value message. * @function verify - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FieldOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.ctype != null && message.hasOwnProperty("ctype")) - switch (message.ctype) { - default: - return "ctype: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.packed != null && message.hasOwnProperty("packed")) - if (typeof message.packed !== "boolean") - return "packed: boolean expected"; - if (message.jstype != null && message.hasOwnProperty("jstype")) - switch (message.jstype) { - default: - return "jstype: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.lazy != null && message.hasOwnProperty("lazy")) - if (typeof message.lazy !== "boolean") - return "lazy: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.weak != null 
&& message.hasOwnProperty("weak")) - if (typeof message.weak !== "boolean") - return "weak: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { - if (!Array.isArray(message[".google.api.fieldBehavior"])) - return ".google.api.fieldBehavior: array expected"; - for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) - switch (message[".google.api.fieldBehavior"][i]) { - default: - return ".google.api.fieldBehavior: enum value[] expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - break; - } - } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) { - var error = $root.google.api.ResourceReference.verify(message[".google.api.resourceReference"]); - if (error) - return ".google.api.resourceReference." + error; - } - return null; - }; - - /** - * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FieldOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FieldOptions} FieldOptions - */ - FieldOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FieldOptions) - return object; - var message = new $root.google.protobuf.FieldOptions(); - switch (object.ctype) { - case "STRING": - case 0: - message.ctype = 0; - break; - case "CORD": - case 1: - message.ctype = 1; - break; - case "STRING_PIECE": - case 2: - message.ctype = 2; - break; - } - if (object.packed != null) - message.packed = Boolean(object.packed); - switch (object.jstype) { - case "JS_NORMAL": - case 0: - message.jstype = 0; - break; - case "JS_STRING": - case 1: - message.jstype = 1; - break; - case "JS_NUMBER": - case 2: - message.jstype = 2; - break; - } - if (object.lazy != null) - message.lazy = Boolean(object.lazy); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.weak != null) - message.weak = Boolean(object.weak); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.fieldBehavior"]) { - if (!Array.isArray(object[".google.api.fieldBehavior"])) - throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); - message[".google.api.fieldBehavior"] = []; - for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) - switch (object[".google.api.fieldBehavior"][i]) { - 
default: - case "FIELD_BEHAVIOR_UNSPECIFIED": - case 0: - message[".google.api.fieldBehavior"][i] = 0; - break; - case "OPTIONAL": - case 1: - message[".google.api.fieldBehavior"][i] = 1; - break; - case "REQUIRED": - case 2: - message[".google.api.fieldBehavior"][i] = 2; - break; - case "OUTPUT_ONLY": - case 3: - message[".google.api.fieldBehavior"][i] = 3; - break; - case "INPUT_ONLY": - case 4: - message[".google.api.fieldBehavior"][i] = 4; - break; - case "IMMUTABLE": - case 5: - message[".google.api.fieldBehavior"][i] = 5; - break; - case "UNORDERED_LIST": - case 6: - message[".google.api.fieldBehavior"][i] = 6; - break; - } - } - if (object[".google.api.resourceReference"] != null) { - if (typeof object[".google.api.resourceReference"] !== "object") - throw TypeError(".google.protobuf.FieldOptions..google.api.resourceReference: object expected"); - message[".google.api.resourceReference"] = $root.google.api.ResourceReference.fromObject(object[".google.api.resourceReference"]); - } + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Int32Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value)) + return "value: integer expected"; + return null; + }; + + /** + * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Int32Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Int32Value} Int32Value + */ + Int32Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Int32Value) + return object; + var message = new $root.google.protobuf.Int32Value(); + if (object.value != null) + message.value = object.value | 0; return message; }; /** - * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * Creates a plain object from an Int32Value message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @static - * @param {google.protobuf.FieldOptions} message FieldOptions + * @param {google.protobuf.Int32Value} message Int32Value * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - FieldOptions.toObject = function toObject(message, options) { + Int32Value.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.fieldBehavior"] = []; - } - if (options.defaults) { - object.ctype = options.enums === String ? "STRING" : 0; - object.packed = false; - object.deprecated = false; - object.lazy = false; - object.jstype = options.enums === String ? "JS_NORMAL" : 0; - object.weak = false; - object[".google.api.resourceReference"] = null; - } - if (message.ctype != null && message.hasOwnProperty("ctype")) - object.ctype = options.enums === String ? 
$root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; - if (message.packed != null && message.hasOwnProperty("packed")) - object.packed = message.packed; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.lazy != null && message.hasOwnProperty("lazy")) - object.lazy = message.lazy; - if (message.jstype != null && message.hasOwnProperty("jstype")) - object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; - if (message.weak != null && message.hasOwnProperty("weak")) - object.weak = message.weak; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { - object[".google.api.fieldBehavior"] = []; - for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) - object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; - } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) - object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; return object; }; /** - * Converts this FieldOptions to JSON. + * Converts this Int32Value to JSON. 
* @function toJSON - * @memberof google.protobuf.FieldOptions + * @memberof google.protobuf.Int32Value * @instance * @returns {Object.} JSON object */ - FieldOptions.prototype.toJSON = function toJSON() { + Int32Value.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - /** - * CType enum. - * @name google.protobuf.FieldOptions.CType - * @enum {number} - * @property {number} STRING=0 STRING value - * @property {number} CORD=1 CORD value - * @property {number} STRING_PIECE=2 STRING_PIECE value - */ - FieldOptions.CType = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STRING"] = 0; - values[valuesById[1] = "CORD"] = 1; - values[valuesById[2] = "STRING_PIECE"] = 2; - return values; - })(); - - /** - * JSType enum. - * @name google.protobuf.FieldOptions.JSType - * @enum {number} - * @property {number} JS_NORMAL=0 JS_NORMAL value - * @property {number} JS_STRING=1 JS_STRING value - * @property {number} JS_NUMBER=2 JS_NUMBER value - */ - FieldOptions.JSType = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "JS_NORMAL"] = 0; - values[valuesById[1] = "JS_STRING"] = 1; - values[valuesById[2] = "JS_NUMBER"] = 2; - return values; - })(); - - return FieldOptions; + return Int32Value; })(); - protobuf.OneofOptions = (function() { + protobuf.UInt32Value = (function() { /** - * Properties of an OneofOptions. + * Properties of a UInt32Value. * @memberof google.protobuf - * @interface IOneofOptions - * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption + * @interface IUInt32Value + * @property {number|null} [value] UInt32Value value */ /** - * Constructs a new OneofOptions. + * Constructs a new UInt32Value. * @memberof google.protobuf - * @classdesc Represents an OneofOptions. - * @implements IOneofOptions + * @classdesc Represents a UInt32Value. 
+ * @implements IUInt32Value * @constructor - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + * @param {google.protobuf.IUInt32Value=} [properties] Properties to set */ - function OneofOptions(properties) { - this.uninterpretedOption = []; + function UInt32Value(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -16899,78 +24140,75 @@ } /** - * OneofOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.OneofOptions + * UInt32Value value. + * @member {number} value + * @memberof google.protobuf.UInt32Value * @instance */ - OneofOptions.prototype.uninterpretedOption = $util.emptyArray; + UInt32Value.prototype.value = 0; /** - * Creates a new OneofOptions instance using the specified properties. + * Creates a new UInt32Value instance using the specified properties. * @function create - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set - * @returns {google.protobuf.OneofOptions} OneofOptions instance + * @param {google.protobuf.IUInt32Value=} [properties] Properties to set + * @returns {google.protobuf.UInt32Value} UInt32Value instance */ - OneofOptions.create = function create(properties) { - return new OneofOptions(properties); + UInt32Value.create = function create(properties) { + return new UInt32Value(properties); }; /** - * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * Encodes the specified UInt32Value message. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - OneofOptions.encode = function encode(message, writer) { + UInt32Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).uint32(message.value); return writer; }; /** - * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { + UInt32Value.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an OneofOptions message from the specified reader or buffer. + * Decodes a UInt32Value message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.OneofOptions} OneofOptions + * @returns {google.protobuf.UInt32Value} UInt32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - OneofOptions.decode = function decode(reader, length) { + UInt32Value.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UInt32Value(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + case 1: + message.value = reader.uint32(); break; default: reader.skipType(tag & 7); @@ -16981,127 +24219,107 @@ }; /** - * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * Decodes a UInt32Value message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.OneofOptions} OneofOptions + * @returns {google.protobuf.UInt32Value} UInt32Value * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - OneofOptions.decodeDelimited = function decodeDelimited(reader) { + UInt32Value.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an OneofOptions message. + * Verifies a UInt32Value message. 
* @function verify - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - OneofOptions.verify = function verify(message) { + UInt32Value.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value)) + return "value: integer expected"; return null; }; /** - * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static * @param {Object.} object Plain object - * @returns {google.protobuf.OneofOptions} OneofOptions + * @returns {google.protobuf.UInt32Value} UInt32Value */ - OneofOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.OneofOptions) + UInt32Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UInt32Value) return object; - var message = new $root.google.protobuf.OneofOptions(); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } + var message = new $root.google.protobuf.UInt32Value(); + if (object.value != null) + message.value = object.value >>> 0; return message; }; /** - * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. + * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @static - * @param {google.protobuf.OneofOptions} message OneofOptions + * @param {google.protobuf.UInt32Value} message UInt32Value * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - OneofOptions.toObject = function toObject(message, options) { + UInt32Value.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; return object; }; /** - * Converts this OneofOptions to JSON. + * Converts this UInt32Value to JSON. * @function toJSON - * @memberof google.protobuf.OneofOptions + * @memberof google.protobuf.UInt32Value * @instance * @returns {Object.} JSON object */ - OneofOptions.prototype.toJSON = function toJSON() { + UInt32Value.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return OneofOptions; + return UInt32Value; })(); - protobuf.EnumOptions = (function() { + protobuf.BoolValue = (function() { /** - * Properties of an EnumOptions. + * Properties of a BoolValue. 
* @memberof google.protobuf - * @interface IEnumOptions - * @property {boolean|null} [allowAlias] EnumOptions allowAlias - * @property {boolean|null} [deprecated] EnumOptions deprecated - * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption + * @interface IBoolValue + * @property {boolean|null} [value] BoolValue value */ /** - * Constructs a new EnumOptions. + * Constructs a new BoolValue. * @memberof google.protobuf - * @classdesc Represents an EnumOptions. - * @implements IEnumOptions + * @classdesc Represents a BoolValue. + * @implements IBoolValue * @constructor - * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + * @param {google.protobuf.IBoolValue=} [properties] Properties to set */ - function EnumOptions(properties) { - this.uninterpretedOption = []; + function BoolValue(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -17109,104 +24327,75 @@ } /** - * EnumOptions allowAlias. - * @member {boolean} allowAlias - * @memberof google.protobuf.EnumOptions - * @instance - */ - EnumOptions.prototype.allowAlias = false; - - /** - * EnumOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.EnumOptions - * @instance - */ - EnumOptions.prototype.deprecated = false; - - /** - * EnumOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.EnumOptions + * BoolValue value. + * @member {boolean} value + * @memberof google.protobuf.BoolValue * @instance */ - EnumOptions.prototype.uninterpretedOption = $util.emptyArray; + BoolValue.prototype.value = false; /** - * Creates a new EnumOptions instance using the specified properties. + * Creates a new BoolValue instance using the specified properties. 
* @function create - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static - * @param {google.protobuf.IEnumOptions=} [properties] Properties to set - * @returns {google.protobuf.EnumOptions} EnumOptions instance + * @param {google.protobuf.IBoolValue=} [properties] Properties to set + * @returns {google.protobuf.BoolValue} BoolValue instance */ - EnumOptions.create = function create(properties) { - return new EnumOptions(properties); + BoolValue.create = function create(properties) { + return new BoolValue(properties); }; /** - * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. * @function encode - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static - * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumOptions.encode = function encode(message, writer) { + BoolValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message.value != null 
&& Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.value); return writer; }; /** - * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static - * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumOptions.encodeDelimited = function encodeDelimited(message, writer) { + BoolValue.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an EnumOptions message from the specified reader or buffer. + * Decodes a BoolValue message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumOptions} EnumOptions + * @returns {google.protobuf.BoolValue} BoolValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumOptions.decode = function decode(reader, length) { + BoolValue.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.BoolValue(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.allowAlias = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + case 1: + message.value = reader.bool(); break; default: reader.skipType(tag & 7); @@ -17217,144 +24406,107 @@ }; /** - * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * Decodes a BoolValue message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumOptions} EnumOptions + * @returns {google.protobuf.BoolValue} BoolValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumOptions.decodeDelimited = function decodeDelimited(reader) { + BoolValue.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an EnumOptions message. + * Verifies a BoolValue message. 
* @function verify - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - EnumOptions.verify = function verify(message) { + BoolValue.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) - if (typeof message.allowAlias !== "boolean") - return "allowAlias: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "boolean") + return "value: boolean expected"; return null; }; /** - * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. + * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static * @param {Object.} object Plain object - * @returns {google.protobuf.EnumOptions} EnumOptions + * @returns {google.protobuf.BoolValue} BoolValue */ - EnumOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumOptions) + BoolValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.BoolValue) return object; - var message = new $root.google.protobuf.EnumOptions(); - if (object.allowAlias != null) - message.allowAlias = Boolean(object.allowAlias); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } + var message = new $root.google.protobuf.BoolValue(); + if (object.value != null) + message.value = Boolean(object.value); return message; }; /** - * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * Creates a plain object from a BoolValue message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @static - * @param {google.protobuf.EnumOptions} message EnumOptions + * @param {google.protobuf.BoolValue} message BoolValue * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - EnumOptions.toObject = function toObject(message, options) { + BoolValue.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.allowAlias = false; - object.deprecated = false; - } - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) - object.allowAlias = message.allowAlias; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } + if (options.defaults) + object.value = false; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; return object; }; /** - * Converts this EnumOptions to JSON. + * Converts this BoolValue to JSON. * @function toJSON - * @memberof google.protobuf.EnumOptions + * @memberof google.protobuf.BoolValue * @instance * @returns {Object.} JSON object */ - EnumOptions.prototype.toJSON = function toJSON() { + BoolValue.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return EnumOptions; + return BoolValue; })(); - protobuf.EnumValueOptions = (function() { + protobuf.StringValue = (function() { /** - * Properties of an EnumValueOptions. + * Properties of a StringValue. 
* @memberof google.protobuf - * @interface IEnumValueOptions - * @property {boolean|null} [deprecated] EnumValueOptions deprecated - * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption + * @interface IStringValue + * @property {string|null} [value] StringValue value */ /** - * Constructs a new EnumValueOptions. + * Constructs a new StringValue. * @memberof google.protobuf - * @classdesc Represents an EnumValueOptions. - * @implements IEnumValueOptions + * @classdesc Represents a StringValue. + * @implements IStringValue * @constructor - * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + * @param {google.protobuf.IStringValue=} [properties] Properties to set */ - function EnumValueOptions(properties) { - this.uninterpretedOption = []; + function StringValue(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -17362,91 +24514,75 @@ } /** - * EnumValueOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.EnumValueOptions - * @instance - */ - EnumValueOptions.prototype.deprecated = false; - - /** - * EnumValueOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.EnumValueOptions + * StringValue value. + * @member {string} value + * @memberof google.protobuf.StringValue * @instance */ - EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; + StringValue.prototype.value = ""; /** - * Creates a new EnumValueOptions instance using the specified properties. + * Creates a new StringValue instance using the specified properties. 
* @function create - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static - * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance + * @param {google.protobuf.IStringValue=} [properties] Properties to set + * @returns {google.protobuf.StringValue} StringValue instance */ - EnumValueOptions.create = function create(properties) { - return new EnumValueOptions(properties); + StringValue.create = function create(properties) { + return new StringValue(properties); }; /** - * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. * @function encode - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static - * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumValueOptions.encode = function encode(message, writer) { + StringValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, 
wireType 2 =*/10).string(message.value); return writer; }; /** - * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static - * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - EnumValueOptions.encodeDelimited = function encodeDelimited(message, writer) { + StringValue.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an EnumValueOptions message from the specified reader or buffer. + * Decodes a StringValue message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @returns {google.protobuf.StringValue} StringValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumValueOptions.decode = function decode(reader, length) { + StringValue.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.StringValue(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + message.value = reader.string(); break; default: reader.skipType(tag & 7); @@ -17457,137 +24593,107 @@ }; /** - * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * Decodes a StringValue message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @returns {google.protobuf.StringValue} StringValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumValueOptions.decodeDelimited = function decodeDelimited(reader) { + StringValue.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an EnumValueOptions message. + * Verifies a StringValue message. 
* @function verify - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - EnumValueOptions.verify = function verify(message) { + StringValue.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isString(message.value)) + return "value: string expected"; return null; }; /** - * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. + * Creates a StringValue message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static * @param {Object.} object Plain object - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @returns {google.protobuf.StringValue} StringValue */ - EnumValueOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumValueOptions) + StringValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.StringValue) return object; - var message = new $root.google.protobuf.EnumValueOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } + var message = new $root.google.protobuf.StringValue(); + if (object.value != null) + message.value = String(object.value); return message; }; /** - * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * Creates a plain object from a StringValue message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @static - * @param {google.protobuf.EnumValueOptions} message EnumValueOptions + * @param {google.protobuf.StringValue} message StringValue * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - EnumValueOptions.toObject = function toObject(message, options) { + StringValue.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; if (options.defaults) - object.deprecated = false; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } + object.value = ""; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; return object; }; /** - * Converts this EnumValueOptions to JSON. + * Converts this StringValue to JSON. * @function toJSON - * @memberof google.protobuf.EnumValueOptions + * @memberof google.protobuf.StringValue * @instance * @returns {Object.} JSON object */ - EnumValueOptions.prototype.toJSON = function toJSON() { + StringValue.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return EnumValueOptions; + return StringValue; })(); - protobuf.ServiceOptions = (function() { + protobuf.BytesValue = (function() { /** - * Properties of a ServiceOptions. + * Properties of a BytesValue. 
* @memberof google.protobuf - * @interface IServiceOptions - * @property {boolean|null} [deprecated] ServiceOptions deprecated - * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption - * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost - * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes + * @interface IBytesValue + * @property {Uint8Array|null} [value] BytesValue value */ /** - * Constructs a new ServiceOptions. + * Constructs a new BytesValue. * @memberof google.protobuf - * @classdesc Represents a ServiceOptions. - * @implements IServiceOptions + * @classdesc Represents a BytesValue. + * @implements IBytesValue * @constructor - * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + * @param {google.protobuf.IBytesValue=} [properties] Properties to set */ - function ServiceOptions(properties) { - this.uninterpretedOption = []; + function BytesValue(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -17595,117 +24701,75 @@ } /** - * ServiceOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype.deprecated = false; - - /** - * ServiceOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * ServiceOptions .google.api.defaultHost. - * @member {string} .google.api.defaultHost - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype[".google.api.defaultHost"] = ""; - - /** - * ServiceOptions .google.api.oauthScopes. - * @member {string} .google.api.oauthScopes - * @memberof google.protobuf.ServiceOptions + * BytesValue value. 
+ * @member {Uint8Array} value + * @memberof google.protobuf.BytesValue * @instance */ - ServiceOptions.prototype[".google.api.oauthScopes"] = ""; + BytesValue.prototype.value = $util.newBuffer([]); /** - * Creates a new ServiceOptions instance using the specified properties. + * Creates a new BytesValue instance using the specified properties. * @function create - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static - * @param {google.protobuf.IServiceOptions=} [properties] Properties to set - * @returns {google.protobuf.ServiceOptions} ServiceOptions instance + * @param {google.protobuf.IBytesValue=} [properties] Properties to set + * @returns {google.protobuf.BytesValue} BytesValue instance */ - ServiceOptions.create = function create(properties) { - return new ServiceOptions(properties); + BytesValue.create = function create(properties) { + return new BytesValue(properties); }; /** - * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static - * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ServiceOptions.encode = function encode(message, writer) { + BytesValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) - writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); - if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) - writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.value); return writer; }; /** - * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static - * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - ServiceOptions.encodeDelimited = function encodeDelimited(message, writer) { + BytesValue.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a ServiceOptions message from the specified reader or buffer. + * Decodes a BytesValue message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @returns {google.protobuf.BytesValue} BytesValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceOptions.decode = function decode(reader, length) { + BytesValue.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.BytesValue(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 33: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1049: - message[".google.api.defaultHost"] = reader.string(); - break; - case 1050: - message[".google.api.oauthScopes"] = reader.string(); + case 1: + message.value = reader.bytes(); break; default: reader.skipType(tag & 7); @@ -17716,156 +24780,117 @@ }; /** - * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * Decodes a BytesValue message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @returns {google.protobuf.BytesValue} BytesValue * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceOptions.decodeDelimited = function decodeDelimited(reader) { + BytesValue.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a ServiceOptions message. + * Verifies a BytesValue message. 
* @function verify - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - ServiceOptions.verify = function verify(message) { + BytesValue.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) - if (!$util.isString(message[".google.api.defaultHost"])) - return ".google.api.defaultHost: string expected"; - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) - if (!$util.isString(message[".google.api.oauthScopes"])) - return ".google.api.oauthScopes: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; return null; }; /** - * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static * @param {Object.} object Plain object - * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @returns {google.protobuf.BytesValue} BytesValue */ - ServiceOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ServiceOptions) + BytesValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.BytesValue) return object; - var message = new $root.google.protobuf.ServiceOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.defaultHost"] != null) - message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); - if (object[".google.api.oauthScopes"] != null) - message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); + var message = new $root.google.protobuf.BytesValue(); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length) + message.value = object.value; return message; }; /** - * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * Creates a plain object from a BytesValue message. 
Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @static - * @param {google.protobuf.ServiceOptions} message ServiceOptions + * @param {google.protobuf.BytesValue} message BytesValue * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - ServiceOptions.toObject = function toObject(message, options) { + BytesValue.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.deprecated = false; - object[".google.api.defaultHost"] = ""; - object[".google.api.oauthScopes"] = ""; - } - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) - object[".google.api.defaultHost"] = message[".google.api.defaultHost"]; - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) - object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; + if (options.defaults) + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? 
Array.prototype.slice.call(message.value) : message.value; return object; }; /** - * Converts this ServiceOptions to JSON. + * Converts this BytesValue to JSON. * @function toJSON - * @memberof google.protobuf.ServiceOptions + * @memberof google.protobuf.BytesValue * @instance * @returns {Object.} JSON object */ - ServiceOptions.prototype.toJSON = function toJSON() { + BytesValue.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return ServiceOptions; + return BytesValue; })(); - protobuf.MethodOptions = (function() { + protobuf.Any = (function() { /** - * Properties of a MethodOptions. + * Properties of an Any. * @memberof google.protobuf - * @interface IMethodOptions - * @property {boolean|null} [deprecated] MethodOptions deprecated - * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel - * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption - * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http - * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature + * @interface IAny + * @property {string|null} [type_url] Any type_url + * @property {Uint8Array|null} [value] Any value */ /** - * Constructs a new MethodOptions. + * Constructs a new Any. * @memberof google.protobuf - * @classdesc Represents a MethodOptions. - * @implements IMethodOptions + * @classdesc Represents an Any. 
+ * @implements IAny * @constructor - * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + * @param {google.protobuf.IAny=} [properties] Properties to set */ - function MethodOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.methodSignature"] = []; + function Any(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -17873,133 +24898,88 @@ } /** - * MethodOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.deprecated = false; - - /** - * MethodOptions idempotencyLevel. - * @member {google.protobuf.MethodOptions.IdempotencyLevel} idempotencyLevel - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.idempotencyLevel = 0; - - /** - * MethodOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * MethodOptions .google.api.http. - * @member {google.api.IHttpRule|null|undefined} .google.api.http - * @memberof google.protobuf.MethodOptions + * Any type_url. + * @member {string} type_url + * @memberof google.protobuf.Any * @instance */ - MethodOptions.prototype[".google.api.http"] = null; + Any.prototype.type_url = ""; /** - * MethodOptions .google.api.methodSignature. - * @member {Array.} .google.api.methodSignature - * @memberof google.protobuf.MethodOptions + * Any value. + * @member {Uint8Array} value + * @memberof google.protobuf.Any * @instance */ - MethodOptions.prototype[".google.api.methodSignature"] = $util.emptyArray; + Any.prototype.value = $util.newBuffer([]); /** - * Creates a new MethodOptions instance using the specified properties. + * Creates a new Any instance using the specified properties. 
* @function create - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static - * @param {google.protobuf.IMethodOptions=} [properties] Properties to set - * @returns {google.protobuf.MethodOptions} MethodOptions instance + * @param {google.protobuf.IAny=} [properties] Properties to set + * @returns {google.protobuf.Any} Any instance */ - MethodOptions.create = function create(properties) { - return new MethodOptions(properties); + Any.create = function create(properties) { + return new Any(properties); }; /** - * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. * @function encode - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static - * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {google.protobuf.IAny} message Any message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - MethodOptions.encode = function encode(message, writer) { + Any.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) - writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.methodSignature"] != null 
&& message[".google.api.methodSignature"].length) - for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) - writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); - if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) - $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); + if (message.type_url != null && Object.hasOwnProperty.call(message, "type_url")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type_url); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); return writer; }; /** - * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static - * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {google.protobuf.IAny} message Any message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - MethodOptions.encodeDelimited = function encodeDelimited(message, writer) { + Any.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a MethodOptions message from the specified reader or buffer. + * Decodes an Any message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MethodOptions} MethodOptions + * @returns {google.protobuf.Any} Any * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MethodOptions.decode = function decode(reader, length) { + Any.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 33: - message.deprecated = reader.bool(); - break; - case 34: - message.idempotencyLevel = reader.int32(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 72295728: - message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); + case 1: + message.type_url = reader.string(); break; - case 1051: - if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) - message[".google.api.methodSignature"] = []; - message[".google.api.methodSignature"].push(reader.string()); + case 2: + message.value = reader.bytes(); break; default: reader.skipType(tag & 7); @@ -18010,217 +24990,124 @@ }; /** - * Decodes a MethodOptions message from the specified reader or buffer, length delimited. 
+ * Decodes an Any message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MethodOptions} MethodOptions + * @returns {google.protobuf.Any} Any * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MethodOptions.decodeDelimited = function decodeDelimited(reader) { + Any.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a MethodOptions message. + * Verifies an Any message. * @function verify - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - MethodOptions.verify = function verify(message) { + Any.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) - switch (message.idempotencyLevel) { - default: - return "idempotencyLevel: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - 
return "uninterpretedOption." + error; - } - } - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) { - var error = $root.google.api.HttpRule.verify(message[".google.api.http"]); - if (error) - return ".google.api.http." + error; - } - if (message[".google.api.methodSignature"] != null && message.hasOwnProperty(".google.api.methodSignature")) { - if (!Array.isArray(message[".google.api.methodSignature"])) - return ".google.api.methodSignature: array expected"; - for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) - if (!$util.isString(message[".google.api.methodSignature"][i])) - return ".google.api.methodSignature: string[] expected"; - } + if (message.type_url != null && message.hasOwnProperty("type_url")) + if (!$util.isString(message.type_url)) + return "type_url: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; return null; }; /** - * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. + * Creates an Any message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.MethodOptions} MethodOptions - */ - MethodOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MethodOptions) - return object; - var message = new $root.google.protobuf.MethodOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - switch (object.idempotencyLevel) { - case "IDEMPOTENCY_UNKNOWN": - case 0: - message.idempotencyLevel = 0; - break; - case "NO_SIDE_EFFECTS": - case 1: - message.idempotencyLevel = 1; - break; - case "IDEMPOTENT": - case 2: - message.idempotencyLevel = 2; - break; - } - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.http"] != null) { - if (typeof object[".google.api.http"] !== "object") - throw TypeError(".google.protobuf.MethodOptions..google.api.http: object expected"); - message[".google.api.http"] = $root.google.api.HttpRule.fromObject(object[".google.api.http"]); - } - if (object[".google.api.methodSignature"]) { - if (!Array.isArray(object[".google.api.methodSignature"])) - throw TypeError(".google.protobuf.MethodOptions..google.api.methodSignature: array expected"); - message[".google.api.methodSignature"] = []; - for (var i = 0; i < object[".google.api.methodSignature"].length; ++i) - message[".google.api.methodSignature"][i] = 
String(object[".google.api.methodSignature"][i]); - } + * @param {Object.} object Plain object + * @returns {google.protobuf.Any} Any + */ + Any.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Any) + return object; + var message = new $root.google.protobuf.Any(); + if (object.type_url != null) + message.type_url = String(object.type_url); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length) + message.value = object.value; return message; }; /** - * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * Creates a plain object from an Any message. Also converts values to other types if specified. * @function toObject - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @static - * @param {google.protobuf.MethodOptions} message MethodOptions + * @param {google.protobuf.Any} message Any * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - MethodOptions.toObject = function toObject(message, options) { + Any.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.methodSignature"] = []; - } if (options.defaults) { - object.deprecated = false; - object.idempotencyLevel = options.enums === String ? "IDEMPOTENCY_UNKNOWN" : 0; - object[".google.api.http"] = null; - } - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) - object.idempotencyLevel = options.enums === String ? 
$root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length) { - object[".google.api.methodSignature"] = []; - for (var j = 0; j < message[".google.api.methodSignature"].length; ++j) - object[".google.api.methodSignature"][j] = message[".google.api.methodSignature"][j]; + object.type_url = ""; + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } } - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) - object[".google.api.http"] = $root.google.api.HttpRule.toObject(message[".google.api.http"], options); + if (message.type_url != null && message.hasOwnProperty("type_url")) + object.type_url = message.type_url; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; return object; }; /** - * Converts this MethodOptions to JSON. + * Converts this Any to JSON. * @function toJSON - * @memberof google.protobuf.MethodOptions + * @memberof google.protobuf.Any * @instance * @returns {Object.} JSON object */ - MethodOptions.prototype.toJSON = function toJSON() { + Any.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - /** - * IdempotencyLevel enum. 
- * @name google.protobuf.MethodOptions.IdempotencyLevel - * @enum {number} - * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value - * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value - * @property {number} IDEMPOTENT=2 IDEMPOTENT value - */ - MethodOptions.IdempotencyLevel = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "IDEMPOTENCY_UNKNOWN"] = 0; - values[valuesById[1] = "NO_SIDE_EFFECTS"] = 1; - values[valuesById[2] = "IDEMPOTENT"] = 2; - return values; - })(); - - return MethodOptions; + return Any; })(); - protobuf.UninterpretedOption = (function() { + protobuf.Empty = (function() { /** - * Properties of an UninterpretedOption. + * Properties of an Empty. * @memberof google.protobuf - * @interface IUninterpretedOption - * @property {Array.|null} [name] UninterpretedOption name - * @property {string|null} [identifierValue] UninterpretedOption identifierValue - * @property {number|Long|null} [positiveIntValue] UninterpretedOption positiveIntValue - * @property {number|Long|null} [negativeIntValue] UninterpretedOption negativeIntValue - * @property {number|null} [doubleValue] UninterpretedOption doubleValue - * @property {Uint8Array|null} [stringValue] UninterpretedOption stringValue - * @property {string|null} [aggregateValue] UninterpretedOption aggregateValue + * @interface IEmpty */ /** - * Constructs a new UninterpretedOption. + * Constructs a new Empty. * @memberof google.protobuf - * @classdesc Represents an UninterpretedOption. - * @implements IUninterpretedOption + * @classdesc Represents an Empty. 
+ * @implements IEmpty * @constructor - * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + * @param {google.protobuf.IEmpty=} [properties] Properties to set */ - function UninterpretedOption(properties) { - this.name = []; + function Empty(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -18228,157 +25115,63 @@ } /** - * UninterpretedOption name. - * @member {Array.} name - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.name = $util.emptyArray; - - /** - * UninterpretedOption identifierValue. - * @member {string} identifierValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.identifierValue = ""; - - /** - * UninterpretedOption positiveIntValue. - * @member {number|Long} positiveIntValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.positiveIntValue = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * UninterpretedOption negativeIntValue. - * @member {number|Long} negativeIntValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.negativeIntValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * UninterpretedOption doubleValue. - * @member {number} doubleValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.doubleValue = 0; - - /** - * UninterpretedOption stringValue. - * @member {Uint8Array} stringValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.stringValue = $util.newBuffer([]); - - /** - * UninterpretedOption aggregateValue. 
- * @member {string} aggregateValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.aggregateValue = ""; - - /** - * Creates a new UninterpretedOption instance using the specified properties. + * Creates a new Empty instance using the specified properties. * @function create - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static - * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption instance + * @param {google.protobuf.IEmpty=} [properties] Properties to set + * @returns {google.protobuf.Empty} Empty instance */ - UninterpretedOption.create = function create(properties) { - return new UninterpretedOption(properties); + Empty.create = function create(properties) { + return new Empty(properties); }; /** - * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static - * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - UninterpretedOption.encode = function encode(message, writer) { + Empty.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.name.length) - for (var i = 0; i < message.name.length; ++i) - $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); - if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) - writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); - if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) - writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); - if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) - writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); - if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) - writer.uint32(/* id 7, wireType 2 =*/58).bytes(message.stringValue); - if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) - writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); return writer; }; /** - * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. 
+ * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static - * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - UninterpretedOption.encodeDelimited = function encodeDelimited(message, writer) { + Empty.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an UninterpretedOption message from the specified reader or buffer. + * Decodes an Empty message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @returns {google.protobuf.Empty} Empty * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UninterpretedOption.decode = function decode(reader, length) { + Empty.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - if (!(message.name && message.name.length)) - message.name = []; - message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); - break; - case 3: - message.identifierValue = reader.string(); - break; - case 4: - message.positiveIntValue = reader.uint64(); - break; - case 5: - message.negativeIntValue = reader.int64(); - break; - case 6: - message.doubleValue = reader.double(); - break; - case 7: - message.stringValue = reader.bytes(); - break; - case 8: - message.aggregateValue = reader.string(); - break; default: reader.skipType(tag & 7); break; @@ -18388,422 +25181,347 @@ }; /** - * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * Decodes an Empty message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @returns {google.protobuf.Empty} Empty * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UninterpretedOption.decodeDelimited = function decodeDelimited(reader) { + Empty.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an UninterpretedOption message. + * Verifies an Empty message. 
* @function verify - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - UninterpretedOption.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) { - if (!Array.isArray(message.name)) - return "name: array expected"; - for (var i = 0; i < message.name.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.NamePart.verify(message.name[i]); - if (error) - return "name." + error; - } - } - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) - if (!$util.isString(message.identifierValue)) - return "identifierValue: string expected"; - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) - if (!$util.isInteger(message.positiveIntValue) && !(message.positiveIntValue && $util.isInteger(message.positiveIntValue.low) && $util.isInteger(message.positiveIntValue.high))) - return "positiveIntValue: integer|Long expected"; - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) - if (!$util.isInteger(message.negativeIntValue) && !(message.negativeIntValue && $util.isInteger(message.negativeIntValue.low) && $util.isInteger(message.negativeIntValue.high))) - return "negativeIntValue: integer|Long expected"; - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) - if (typeof message.doubleValue !== "number") - return "doubleValue: number expected"; - if (message.stringValue != null && message.hasOwnProperty("stringValue")) - if (!(message.stringValue && typeof message.stringValue.length === "number" || $util.isString(message.stringValue))) - return "stringValue: buffer expected"; - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) - if 
(!$util.isString(message.aggregateValue)) - return "aggregateValue: string expected"; + * @memberof google.protobuf.Empty + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Empty.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; return null; }; /** - * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * Creates an Empty message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static * @param {Object.} object Plain object - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @returns {google.protobuf.Empty} Empty */ - UninterpretedOption.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UninterpretedOption) + Empty.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Empty) return object; - var message = new $root.google.protobuf.UninterpretedOption(); - if (object.name) { - if (!Array.isArray(object.name)) - throw TypeError(".google.protobuf.UninterpretedOption.name: array expected"); - message.name = []; - for (var i = 0; i < object.name.length; ++i) { - if (typeof object.name[i] !== "object") - throw TypeError(".google.protobuf.UninterpretedOption.name: object expected"); - message.name[i] = $root.google.protobuf.UninterpretedOption.NamePart.fromObject(object.name[i]); - } - } - if (object.identifierValue != null) - message.identifierValue = String(object.identifierValue); - if (object.positiveIntValue != null) - if ($util.Long) - (message.positiveIntValue = $util.Long.fromValue(object.positiveIntValue)).unsigned = true; - else if (typeof object.positiveIntValue === "string") - 
message.positiveIntValue = parseInt(object.positiveIntValue, 10); - else if (typeof object.positiveIntValue === "number") - message.positiveIntValue = object.positiveIntValue; - else if (typeof object.positiveIntValue === "object") - message.positiveIntValue = new $util.LongBits(object.positiveIntValue.low >>> 0, object.positiveIntValue.high >>> 0).toNumber(true); - if (object.negativeIntValue != null) - if ($util.Long) - (message.negativeIntValue = $util.Long.fromValue(object.negativeIntValue)).unsigned = false; - else if (typeof object.negativeIntValue === "string") - message.negativeIntValue = parseInt(object.negativeIntValue, 10); - else if (typeof object.negativeIntValue === "number") - message.negativeIntValue = object.negativeIntValue; - else if (typeof object.negativeIntValue === "object") - message.negativeIntValue = new $util.LongBits(object.negativeIntValue.low >>> 0, object.negativeIntValue.high >>> 0).toNumber(); - if (object.doubleValue != null) - message.doubleValue = Number(object.doubleValue); - if (object.stringValue != null) - if (typeof object.stringValue === "string") - $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); - else if (object.stringValue.length) - message.stringValue = object.stringValue; - if (object.aggregateValue != null) - message.aggregateValue = String(object.aggregateValue); - return message; + return new $root.google.protobuf.Empty(); }; /** - * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * Creates a plain object from an Empty message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @static - * @param {google.protobuf.UninterpretedOption} message UninterpretedOption + * @param {google.protobuf.Empty} message Empty * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - UninterpretedOption.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.name = []; - if (options.defaults) { - object.identifierValue = ""; - if ($util.Long) { - var long = new $util.Long(0, 0, true); - object.positiveIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.positiveIntValue = options.longs === String ? "0" : 0; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.negativeIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.negativeIntValue = options.longs === String ? "0" : 0; - object.doubleValue = 0; - if (options.bytes === String) - object.stringValue = ""; - else { - object.stringValue = []; - if (options.bytes !== Array) - object.stringValue = $util.newBuffer(object.stringValue); - } - object.aggregateValue = ""; - } - if (message.name && message.name.length) { - object.name = []; - for (var j = 0; j < message.name.length; ++j) - object.name[j] = $root.google.protobuf.UninterpretedOption.NamePart.toObject(message.name[j], options); - } - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) - object.identifierValue = message.identifierValue; - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) - if (typeof message.positiveIntValue === "number") - object.positiveIntValue = options.longs === String ? 
String(message.positiveIntValue) : message.positiveIntValue; - else - object.positiveIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.positiveIntValue) : options.longs === Number ? new $util.LongBits(message.positiveIntValue.low >>> 0, message.positiveIntValue.high >>> 0).toNumber(true) : message.positiveIntValue; - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) - if (typeof message.negativeIntValue === "number") - object.negativeIntValue = options.longs === String ? String(message.negativeIntValue) : message.negativeIntValue; - else - object.negativeIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.negativeIntValue) : options.longs === Number ? new $util.LongBits(message.negativeIntValue.low >>> 0, message.negativeIntValue.high >>> 0).toNumber() : message.negativeIntValue; - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) - object.doubleValue = options.json && !isFinite(message.doubleValue) ? String(message.doubleValue) : message.doubleValue; - if (message.stringValue != null && message.hasOwnProperty("stringValue")) - object.stringValue = options.bytes === String ? $util.base64.encode(message.stringValue, 0, message.stringValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.stringValue) : message.stringValue; - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) - object.aggregateValue = message.aggregateValue; - return object; + Empty.toObject = function toObject() { + return {}; }; /** - * Converts this UninterpretedOption to JSON. + * Converts this Empty to JSON. 
* @function toJSON - * @memberof google.protobuf.UninterpretedOption + * @memberof google.protobuf.Empty * @instance * @returns {Object.} JSON object */ - UninterpretedOption.prototype.toJSON = function toJSON() { + Empty.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - UninterpretedOption.NamePart = (function() { + return Empty; + })(); - /** - * Properties of a NamePart. - * @memberof google.protobuf.UninterpretedOption - * @interface INamePart - * @property {string} namePart NamePart namePart - * @property {boolean} isExtension NamePart isExtension - */ + return protobuf; + })(); - /** - * Constructs a new NamePart. - * @memberof google.protobuf.UninterpretedOption - * @classdesc Represents a NamePart. - * @implements INamePart - * @constructor - * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set - */ - function NamePart(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + google.api = (function() { - /** - * NamePart namePart. - * @member {string} namePart - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - */ - NamePart.prototype.namePart = ""; + /** + * Namespace api. + * @memberof google + * @namespace + */ + var api = {}; - /** - * NamePart isExtension. - * @member {boolean} isExtension - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - */ - NamePart.prototype.isExtension = false; + api.Http = (function() { - /** - * Creates a new NamePart instance using the specified properties. 
- * @function create - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart instance - */ - NamePart.create = function create(properties) { - return new NamePart(properties); - }; + /** + * Properties of a Http. + * @memberof google.api + * @interface IHttp + * @property {Array.|null} [rules] Http rules + * @property {boolean|null} [fullyDecodeReservedExpansion] Http fullyDecodeReservedExpansion + */ - /** - * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @function encode - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - NamePart.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - writer.uint32(/* id 1, wireType 2 =*/10).string(message.namePart); - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.isExtension); - return writer; - }; + /** + * Constructs a new Http. + * @memberof google.api + * @classdesc Represents a Http. + * @implements IHttp + * @constructor + * @param {google.api.IHttp=} [properties] Properties to set + */ + function Http(properties) { + this.rules = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - NamePart.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * Http rules. + * @member {Array.} rules + * @memberof google.api.Http + * @instance + */ + Http.prototype.rules = $util.emptyArray; + + /** + * Http fullyDecodeReservedExpansion. + * @member {boolean} fullyDecodeReservedExpansion + * @memberof google.api.Http + * @instance + */ + Http.prototype.fullyDecodeReservedExpansion = false; + + /** + * Creates a new Http instance using the specified properties. + * @function create + * @memberof google.api.Http + * @static + * @param {google.api.IHttp=} [properties] Properties to set + * @returns {google.api.Http} Http instance + */ + Http.create = function create(properties) { + return new Http(properties); + }; - /** - * Decodes a NamePart message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - NamePart.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.namePart = reader.string(); - break; - case 2: - message.isExtension = reader.bool(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - if (!message.hasOwnProperty("namePart")) - throw $util.ProtocolError("missing required 'namePart'", { instance: message }); - if (!message.hasOwnProperty("isExtension")) - throw $util.ProtocolError("missing required 'isExtension'", { instance: message }); - return message; - }; + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @function encode + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.rules != null && message.rules.length) + for (var i = 0; i < message.rules.length; ++i) + $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); + return writer; + }; - /** - * Decodes a NamePart message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - NamePart.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; - /** - * Verifies a NamePart message. - * @function verify - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - NamePart.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (!$util.isString(message.namePart)) - return "namePart: string expected"; - if (typeof message.isExtension !== "boolean") - return "isExtension: boolean expected"; - return null; - }; + /** + * Decodes a Http message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Http(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.rules && message.rules.length)) + message.rules = []; + message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; - /** - * Creates a NamePart message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - */ - NamePart.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) - return object; - var message = new $root.google.protobuf.UninterpretedOption.NamePart(); - if (object.namePart != null) - message.namePart = String(object.namePart); - if (object.isExtension != null) - message.isExtension = Boolean(object.isExtension); - return message; - }; + /** + * Decodes a Http message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * Creates a plain object from a NamePart message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - NamePart.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.namePart = ""; - object.isExtension = false; + /** + * Verifies a Http message. + * @function verify + * @memberof google.api.Http + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Http.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.rules != null && message.hasOwnProperty("rules")) { + if (!Array.isArray(message.rules)) + return "rules: array expected"; + for (var i = 0; i < message.rules.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.rules[i]); + if (error) + return "rules." 
+ error; } - if (message.namePart != null && message.hasOwnProperty("namePart")) - object.namePart = message.namePart; - if (message.isExtension != null && message.hasOwnProperty("isExtension")) - object.isExtension = message.isExtension; + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + if (typeof message.fullyDecodeReservedExpansion !== "boolean") + return "fullyDecodeReservedExpansion: boolean expected"; + return null; + }; + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.Http + * @static + * @param {Object.} object Plain object + * @returns {google.api.Http} Http + */ + Http.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.Http) return object; - }; + var message = new $root.google.api.Http(); + if (object.rules) { + if (!Array.isArray(object.rules)) + throw TypeError(".google.api.Http.rules: array expected"); + message.rules = []; + for (var i = 0; i < object.rules.length; ++i) { + if (typeof object.rules[i] !== "object") + throw TypeError(".google.api.Http.rules: object expected"); + message.rules[i] = $root.google.api.HttpRule.fromObject(object.rules[i]); + } + } + if (object.fullyDecodeReservedExpansion != null) + message.fullyDecodeReservedExpansion = Boolean(object.fullyDecodeReservedExpansion); + return message; + }; - /** - * Converts this NamePart to JSON. - * @function toJSON - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - * @returns {Object.} JSON object - */ - NamePart.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.Http + * @static + * @param {google.api.Http} message Http + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Http.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.rules = []; + if (options.defaults) + object.fullyDecodeReservedExpansion = false; + if (message.rules && message.rules.length) { + object.rules = []; + for (var j = 0; j < message.rules.length; ++j) + object.rules[j] = $root.google.api.HttpRule.toObject(message.rules[j], options); + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + object.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + return object; + }; - return NamePart; - })(); + /** + * Converts this Http to JSON. + * @function toJSON + * @memberof google.api.Http + * @instance + * @returns {Object.} JSON object + */ + Http.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - return UninterpretedOption; + return Http; })(); - protobuf.SourceCodeInfo = (function() { + api.HttpRule = (function() { /** - * Properties of a SourceCodeInfo. - * @memberof google.protobuf - * @interface ISourceCodeInfo - * @property {Array.|null} [location] SourceCodeInfo location + * Properties of a HttpRule. 
+ * @memberof google.api + * @interface IHttpRule + * @property {string|null} [selector] HttpRule selector + * @property {string|null} [get] HttpRule get + * @property {string|null} [put] HttpRule put + * @property {string|null} [post] HttpRule post + * @property {string|null} ["delete"] HttpRule delete + * @property {string|null} [patch] HttpRule patch + * @property {google.api.ICustomHttpPattern|null} [custom] HttpRule custom + * @property {string|null} [body] HttpRule body + * @property {string|null} [responseBody] HttpRule responseBody + * @property {Array.|null} [additionalBindings] HttpRule additionalBindings */ /** - * Constructs a new SourceCodeInfo. - * @memberof google.protobuf - * @classdesc Represents a SourceCodeInfo. - * @implements ISourceCodeInfo + * Constructs a new HttpRule. + * @memberof google.api + * @classdesc Represents a HttpRule. + * @implements IHttpRule * @constructor - * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set + * @param {google.api.IHttpRule=} [properties] Properties to set */ - function SourceCodeInfo(properties) { - this.location = []; + function HttpRule(properties) { + this.additionalBindings = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -18811,78 +25529,209 @@ } /** - * SourceCodeInfo location. - * @member {Array.} location - * @memberof google.protobuf.SourceCodeInfo + * HttpRule selector. + * @member {string} selector + * @memberof google.api.HttpRule * @instance */ - SourceCodeInfo.prototype.location = $util.emptyArray; + HttpRule.prototype.selector = ""; /** - * Creates a new SourceCodeInfo instance using the specified properties. + * HttpRule get. + * @member {string|null|undefined} get + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.get = null; + + /** + * HttpRule put. 
+ * @member {string|null|undefined} put + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.put = null; + + /** + * HttpRule post. + * @member {string|null|undefined} post + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.post = null; + + /** + * HttpRule delete. + * @member {string|null|undefined} delete + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype["delete"] = null; + + /** + * HttpRule patch. + * @member {string|null|undefined} patch + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.patch = null; + + /** + * HttpRule custom. + * @member {google.api.ICustomHttpPattern|null|undefined} custom + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.custom = null; + + /** + * HttpRule body. + * @member {string} body + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.body = ""; + + /** + * HttpRule responseBody. + * @member {string} responseBody + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.responseBody = ""; + + /** + * HttpRule additionalBindings. + * @member {Array.} additionalBindings + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.additionalBindings = $util.emptyArray; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * HttpRule pattern. + * @member {"get"|"put"|"post"|"delete"|"patch"|"custom"|undefined} pattern + * @memberof google.api.HttpRule + * @instance + */ + Object.defineProperty(HttpRule.prototype, "pattern", { + get: $util.oneOfGetter($oneOfFields = ["get", "put", "post", "delete", "patch", "custom"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new HttpRule instance using the specified properties. 
* @function create - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static - * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo instance + * @param {google.api.IHttpRule=} [properties] Properties to set + * @returns {google.api.HttpRule} HttpRule instance */ - SourceCodeInfo.create = function create(properties) { - return new SourceCodeInfo(properties); + HttpRule.create = function create(properties) { + return new HttpRule(properties); }; /** - * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. * @function encode - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static - * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SourceCodeInfo.encode = function encode(message, writer) { + HttpRule.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.location != null && message.location.length) - for (var i = 0; i < message.location.length; ++i) - $root.google.protobuf.SourceCodeInfo.Location.encode(message.location[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); + if (message.get != null && Object.hasOwnProperty.call(message, "get")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); + if (message.put != null && Object.hasOwnProperty.call(message, "put")) + writer.uint32(/* id 3, 
wireType 2 =*/26).string(message.put); + if (message.post != null && Object.hasOwnProperty.call(message, "post")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); + if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); + if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); + if (message.body != null && Object.hasOwnProperty.call(message, "body")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); + if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) + $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.additionalBindings != null && message.additionalBindings.length) + for (var i = 0; i < message.additionalBindings.length; ++i) + $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); + if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); return writer; }; /** - * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static - * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - SourceCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + HttpRule.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a SourceCodeInfo message from the specified reader or buffer. + * Decodes a HttpRule message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @returns {google.api.HttpRule} HttpRule * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SourceCodeInfo.decode = function decode(reader, length) { + HttpRule.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.location && message.location.length)) - message.location = []; - message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message["delete"] = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + if (!(message.additionalBindings && message.additionalBindings.length)) + message.additionalBindings = []; + message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); break; default: reader.skipType(tag & 7); @@ -18893,467 +25742,481 @@ }; /** - * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. + * Decodes a HttpRule message from the specified reader or buffer, length delimited. 
* @function decodeDelimited - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @returns {google.api.HttpRule} HttpRule * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SourceCodeInfo.decodeDelimited = function decodeDelimited(reader) { + HttpRule.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a SourceCodeInfo message. + * Verifies a HttpRule message. * @function verify - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - SourceCodeInfo.verify = function verify(message) { + HttpRule.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.location != null && message.hasOwnProperty("location")) { - if (!Array.isArray(message.location)) - return "location: array expected"; - for (var i = 0; i < message.location.length; ++i) { - var error = $root.google.protobuf.SourceCodeInfo.Location.verify(message.location[i]); + var properties = {}; + if (message.selector != null && message.hasOwnProperty("selector")) + if (!$util.isString(message.selector)) + return "selector: string expected"; + if (message.get != null && message.hasOwnProperty("get")) { + properties.pattern = 1; + if (!$util.isString(message.get)) + return "get: string expected"; + } + if (message.put != null && message.hasOwnProperty("put")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.put)) + 
return "put: string expected"; + } + if (message.post != null && message.hasOwnProperty("post")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.post)) + return "post: string expected"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message["delete"])) + return "delete: string expected"; + } + if (message.patch != null && message.hasOwnProperty("patch")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.patch)) + return "patch: string expected"; + } + if (message.custom != null && message.hasOwnProperty("custom")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + { + var error = $root.google.api.CustomHttpPattern.verify(message.custom); if (error) - return "location." + error; + return "custom." + error; + } + } + if (message.body != null && message.hasOwnProperty("body")) + if (!$util.isString(message.body)) + return "body: string expected"; + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + if (!$util.isString(message.responseBody)) + return "responseBody: string expected"; + if (message.additionalBindings != null && message.hasOwnProperty("additionalBindings")) { + if (!Array.isArray(message.additionalBindings)) + return "additionalBindings: array expected"; + for (var i = 0; i < message.additionalBindings.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.additionalBindings[i]); + if (error) + return "additionalBindings." + error; } } return null; }; /** - * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * Creates a HttpRule message from a plain object. 
Also converts values to their respective internal types. * @function fromObject - * @memberof google.protobuf.SourceCodeInfo + * @memberof google.api.HttpRule * @static * @param {Object.} object Plain object - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @returns {google.api.HttpRule} HttpRule */ - SourceCodeInfo.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.SourceCodeInfo) + HttpRule.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.HttpRule) return object; - var message = new $root.google.protobuf.SourceCodeInfo(); - if (object.location) { - if (!Array.isArray(object.location)) - throw TypeError(".google.protobuf.SourceCodeInfo.location: array expected"); - message.location = []; - for (var i = 0; i < object.location.length; ++i) { - if (typeof object.location[i] !== "object") - throw TypeError(".google.protobuf.SourceCodeInfo.location: object expected"); - message.location[i] = $root.google.protobuf.SourceCodeInfo.Location.fromObject(object.location[i]); + var message = new $root.google.api.HttpRule(); + if (object.selector != null) + message.selector = String(object.selector); + if (object.get != null) + message.get = String(object.get); + if (object.put != null) + message.put = String(object.put); + if (object.post != null) + message.post = String(object.post); + if (object["delete"] != null) + message["delete"] = String(object["delete"]); + if (object.patch != null) + message.patch = String(object.patch); + if (object.custom != null) { + if (typeof object.custom !== "object") + throw TypeError(".google.api.HttpRule.custom: object expected"); + message.custom = $root.google.api.CustomHttpPattern.fromObject(object.custom); + } + if (object.body != null) + message.body = String(object.body); + if (object.responseBody != null) + message.responseBody = String(object.responseBody); + if (object.additionalBindings) { + if (!Array.isArray(object.additionalBindings)) + throw 
TypeError(".google.api.HttpRule.additionalBindings: array expected"); + message.additionalBindings = []; + for (var i = 0; i < object.additionalBindings.length; ++i) { + if (typeof object.additionalBindings[i] !== "object") + throw TypeError(".google.api.HttpRule.additionalBindings: object expected"); + message.additionalBindings[i] = $root.google.api.HttpRule.fromObject(object.additionalBindings[i]); } } return message; }; /** - * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.SourceCodeInfo + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.HttpRule + * @static + * @param {google.api.HttpRule} message HttpRule + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + HttpRule.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.additionalBindings = []; + if (options.defaults) { + object.selector = ""; + object.body = ""; + object.responseBody = ""; + } + if (message.selector != null && message.hasOwnProperty("selector")) + object.selector = message.selector; + if (message.get != null && message.hasOwnProperty("get")) { + object.get = message.get; + if (options.oneofs) + object.pattern = "get"; + } + if (message.put != null && message.hasOwnProperty("put")) { + object.put = message.put; + if (options.oneofs) + object.pattern = "put"; + } + if (message.post != null && message.hasOwnProperty("post")) { + object.post = message.post; + if (options.oneofs) + object.pattern = "post"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + object["delete"] = message["delete"]; + if (options.oneofs) + object.pattern = "delete"; + } + if (message.patch != null && message.hasOwnProperty("patch")) 
{ + object.patch = message.patch; + if (options.oneofs) + object.pattern = "patch"; + } + if (message.body != null && message.hasOwnProperty("body")) + object.body = message.body; + if (message.custom != null && message.hasOwnProperty("custom")) { + object.custom = $root.google.api.CustomHttpPattern.toObject(message.custom, options); + if (options.oneofs) + object.pattern = "custom"; + } + if (message.additionalBindings && message.additionalBindings.length) { + object.additionalBindings = []; + for (var j = 0; j < message.additionalBindings.length; ++j) + object.additionalBindings[j] = $root.google.api.HttpRule.toObject(message.additionalBindings[j], options); + } + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + object.responseBody = message.responseBody; + return object; + }; + + /** + * Converts this HttpRule to JSON. + * @function toJSON + * @memberof google.api.HttpRule + * @instance + * @returns {Object.} JSON object + */ + HttpRule.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return HttpRule; + })(); + + api.CustomHttpPattern = (function() { + + /** + * Properties of a CustomHttpPattern. + * @memberof google.api + * @interface ICustomHttpPattern + * @property {string|null} [kind] CustomHttpPattern kind + * @property {string|null} [path] CustomHttpPattern path + */ + + /** + * Constructs a new CustomHttpPattern. + * @memberof google.api + * @classdesc Represents a CustomHttpPattern. + * @implements ICustomHttpPattern + * @constructor + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + */ + function CustomHttpPattern(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CustomHttpPattern kind. 
+ * @member {string} kind + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.kind = ""; + + /** + * CustomHttpPattern path. + * @member {string} path + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.path = ""; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @function create + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + * @returns {google.api.CustomHttpPattern} CustomHttpPattern instance + */ + CustomHttpPattern.create = function create(properties) { + return new CustomHttpPattern(properties); + }; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @function encode + * @memberof google.api.CustomHttpPattern * @static - * @param {google.protobuf.SourceCodeInfo} message SourceCodeInfo - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - SourceCodeInfo.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.location = []; - if (message.location && message.location.length) { - object.location = []; - for (var j = 0; j < message.location.length; ++j) - object.location[j] = $root.google.protobuf.SourceCodeInfo.Location.toObject(message.location[j], options); - } - return object; + CustomHttpPattern.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); + 
if (message.path != null && Object.hasOwnProperty.call(message, "path")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); + return writer; }; /** - * Converts this SourceCodeInfo to JSON. - * @function toJSON - * @memberof google.protobuf.SourceCodeInfo - * @instance - * @returns {Object.} JSON object + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer */ - SourceCodeInfo.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + CustomHttpPattern.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); }; - SourceCodeInfo.Location = (function() { - - /** - * Properties of a Location. - * @memberof google.protobuf.SourceCodeInfo - * @interface ILocation - * @property {Array.|null} [path] Location path - * @property {Array.|null} [span] Location span - * @property {string|null} [leadingComments] Location leadingComments - * @property {string|null} [trailingComments] Location trailingComments - * @property {Array.|null} [leadingDetachedComments] Location leadingDetachedComments - */ - - /** - * Constructs a new Location. - * @memberof google.protobuf.SourceCodeInfo - * @classdesc Represents a Location. 
- * @implements ILocation - * @constructor - * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set - */ - function Location(properties) { - this.path = []; - this.span = []; - this.leadingDetachedComments = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Location path. - * @member {Array.} path - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.path = $util.emptyArray; - - /** - * Location span. - * @member {Array.} span - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.span = $util.emptyArray; - - /** - * Location leadingComments. - * @member {string} leadingComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.leadingComments = ""; - - /** - * Location trailingComments. - * @member {string} trailingComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.trailingComments = ""; - - /** - * Location leadingDetachedComments. - * @member {Array.} leadingDetachedComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.leadingDetachedComments = $util.emptyArray; - - /** - * Creates a new Location instance using the specified properties. - * @function create - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set - * @returns {google.protobuf.SourceCodeInfo.Location} Location instance - */ - Location.create = function create(properties) { - return new Location(properties); - }; - - /** - * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Location.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.path != null && message.path.length) { - writer.uint32(/* id 1, wireType 2 =*/10).fork(); - for (var i = 0; i < message.path.length; ++i) - writer.int32(message.path[i]); - writer.ldelim(); - } - if (message.span != null && message.span.length) { - writer.uint32(/* id 2, wireType 2 =*/18).fork(); - for (var i = 0; i < message.span.length; ++i) - writer.int32(message.span[i]); - writer.ldelim(); - } - if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); - if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) - writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); - if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) - for (var i = 0; i < message.leadingDetachedComments.length; ++i) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.leadingDetachedComments[i]); - return writer; - }; - - /** - * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Location.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Location message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.SourceCodeInfo.Location} Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Location.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - case 2: - if (!(message.span && message.span.length)) - message.span = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.span.push(reader.int32()); - } else - message.span.push(reader.int32()); - break; - case 3: - message.leadingComments = reader.string(); - break; - case 4: - message.trailingComments = reader.string(); - break; - case 6: - if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) - message.leadingDetachedComments = []; - message.leadingDetachedComments.push(reader.string()); - break; - default: - reader.skipType(tag & 7); - break; - } + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @function decode + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; } - return message; - }; - - /** - * Decodes a Location message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.SourceCodeInfo.Location} Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Location.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + } + return message; + }; - /** - * Verifies a Location message. 
- * @function verify - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Location.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.path != null && message.hasOwnProperty("path")) { - if (!Array.isArray(message.path)) - return "path: array expected"; - for (var i = 0; i < message.path.length; ++i) - if (!$util.isInteger(message.path[i])) - return "path: integer[] expected"; - } - if (message.span != null && message.hasOwnProperty("span")) { - if (!Array.isArray(message.span)) - return "span: array expected"; - for (var i = 0; i < message.span.length; ++i) - if (!$util.isInteger(message.span[i])) - return "span: integer[] expected"; - } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) - if (!$util.isString(message.leadingComments)) - return "leadingComments: string expected"; - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) - if (!$util.isString(message.trailingComments)) - return "trailingComments: string expected"; - if (message.leadingDetachedComments != null && message.hasOwnProperty("leadingDetachedComments")) { - if (!Array.isArray(message.leadingDetachedComments)) - return "leadingDetachedComments: array expected"; - for (var i = 0; i < message.leadingDetachedComments.length; ++i) - if (!$util.isString(message.leadingDetachedComments[i])) - return "leadingDetachedComments: string[] expected"; - } - return null; - }; + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; - /** - * Creates a Location message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.SourceCodeInfo.Location} Location - */ - Location.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.SourceCodeInfo.Location) - return object; - var message = new $root.google.protobuf.SourceCodeInfo.Location(); - if (object.path) { - if (!Array.isArray(object.path)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.path: array expected"); - message.path = []; - for (var i = 0; i < object.path.length; ++i) - message.path[i] = object.path[i] | 0; - } - if (object.span) { - if (!Array.isArray(object.span)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.span: array expected"); - message.span = []; - for (var i = 0; i < object.span.length; ++i) - message.span[i] = object.span[i] | 0; - } - if (object.leadingComments != null) - message.leadingComments = String(object.leadingComments); - if (object.trailingComments != null) - message.trailingComments = String(object.trailingComments); - if (object.leadingDetachedComments) { - if (!Array.isArray(object.leadingDetachedComments)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.leadingDetachedComments: array expected"); - 
message.leadingDetachedComments = []; - for (var i = 0; i < object.leadingDetachedComments.length; ++i) - message.leadingDetachedComments[i] = String(object.leadingDetachedComments[i]); - } - return message; - }; + /** + * Verifies a CustomHttpPattern message. + * @function verify + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CustomHttpPattern.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.kind != null && message.hasOwnProperty("kind")) + if (!$util.isString(message.kind)) + return "kind: string expected"; + if (message.path != null && message.hasOwnProperty("path")) + if (!$util.isString(message.path)) + return "path: string expected"; + return null; + }; - /** - * Creates a plain object from a Location message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.Location} message Location - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Location.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.path = []; - object.span = []; - object.leadingDetachedComments = []; - } - if (options.defaults) { - object.leadingComments = ""; - object.trailingComments = ""; - } - if (message.path && message.path.length) { - object.path = []; - for (var j = 0; j < message.path.length; ++j) - object.path[j] = message.path[j]; - } - if (message.span && message.span.length) { - object.span = []; - for (var j = 0; j < message.span.length; ++j) - object.span[j] = message.span[j]; - } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) - object.leadingComments = message.leadingComments; - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) - object.trailingComments = message.trailingComments; - if (message.leadingDetachedComments && message.leadingDetachedComments.length) { - object.leadingDetachedComments = []; - for (var j = 0; j < message.leadingDetachedComments.length; ++j) - object.leadingDetachedComments[j] = message.leadingDetachedComments[j]; - } + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} object Plain object + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + */ + CustomHttpPattern.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.CustomHttpPattern) return object; - }; + var message = new $root.google.api.CustomHttpPattern(); + if (object.kind != null) + message.kind = String(object.kind); + if (object.path != null) + message.path = String(object.path); + return message; + }; - /** - * Converts this Location to JSON. - * @function toJSON - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - * @returns {Object.} JSON object - */ - Location.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.CustomHttpPattern} message CustomHttpPattern + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CustomHttpPattern.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.kind = ""; + object.path = ""; + } + if (message.kind != null && message.hasOwnProperty("kind")) + object.kind = message.kind; + if (message.path != null && message.hasOwnProperty("path")) + object.path = message.path; + return object; + }; - return Location; - })(); + /** + * Converts this CustomHttpPattern to JSON. 
+ * @function toJSON + * @memberof google.api.CustomHttpPattern + * @instance + * @returns {Object.} JSON object + */ + CustomHttpPattern.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - return SourceCodeInfo; + return CustomHttpPattern; + })(); + + /** + * FieldBehavior enum. + * @name google.api.FieldBehavior + * @enum {number} + * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value + * @property {number} OPTIONAL=1 OPTIONAL value + * @property {number} REQUIRED=2 REQUIRED value + * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value + * @property {number} INPUT_ONLY=4 INPUT_ONLY value + * @property {number} IMMUTABLE=5 IMMUTABLE value + * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value + */ + api.FieldBehavior = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "FIELD_BEHAVIOR_UNSPECIFIED"] = 0; + values[valuesById[1] = "OPTIONAL"] = 1; + values[valuesById[2] = "REQUIRED"] = 2; + values[valuesById[3] = "OUTPUT_ONLY"] = 3; + values[valuesById[4] = "INPUT_ONLY"] = 4; + values[valuesById[5] = "IMMUTABLE"] = 5; + values[valuesById[6] = "UNORDERED_LIST"] = 6; + return values; })(); - protobuf.GeneratedCodeInfo = (function() { + api.ResourceDescriptor = (function() { /** - * Properties of a GeneratedCodeInfo. - * @memberof google.protobuf - * @interface IGeneratedCodeInfo - * @property {Array.|null} [annotation] GeneratedCodeInfo annotation + * Properties of a ResourceDescriptor. 
+ * @memberof google.api + * @interface IResourceDescriptor + * @property {string|null} [type] ResourceDescriptor type + * @property {Array.|null} [pattern] ResourceDescriptor pattern + * @property {string|null} [nameField] ResourceDescriptor nameField + * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history + * @property {string|null} [plural] ResourceDescriptor plural + * @property {string|null} [singular] ResourceDescriptor singular + * @property {Array.|null} [style] ResourceDescriptor style */ /** - * Constructs a new GeneratedCodeInfo. - * @memberof google.protobuf - * @classdesc Represents a GeneratedCodeInfo. - * @implements IGeneratedCodeInfo + * Constructs a new ResourceDescriptor. + * @memberof google.api + * @classdesc Represents a ResourceDescriptor. + * @implements IResourceDescriptor * @constructor - * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set + * @param {google.api.IResourceDescriptor=} [properties] Properties to set */ - function GeneratedCodeInfo(properties) { - this.annotation = []; + function ResourceDescriptor(properties) { + this.pattern = []; + this.style = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -19361,486 +26224,417 @@ } /** - * GeneratedCodeInfo annotation. - * @member {Array.} annotation - * @memberof google.protobuf.GeneratedCodeInfo + * ResourceDescriptor type. + * @member {string} type + * @memberof google.api.ResourceDescriptor * @instance */ - GeneratedCodeInfo.prototype.annotation = $util.emptyArray; + ResourceDescriptor.prototype.type = ""; /** - * Creates a new GeneratedCodeInfo instance using the specified properties. + * ResourceDescriptor pattern. + * @member {Array.} pattern + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.pattern = $util.emptyArray; + + /** + * ResourceDescriptor nameField. 
+ * @member {string} nameField + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.nameField = ""; + + /** + * ResourceDescriptor history. + * @member {google.api.ResourceDescriptor.History} history + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.history = 0; + + /** + * ResourceDescriptor plural. + * @member {string} plural + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.plural = ""; + + /** + * ResourceDescriptor singular. + * @member {string} singular + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.singular = ""; + + /** + * ResourceDescriptor style. + * @member {Array.} style + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.style = $util.emptyArray; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. * @function create - * @memberof google.protobuf.GeneratedCodeInfo + * @memberof google.api.ResourceDescriptor * @static - * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo instance + * @param {google.api.IResourceDescriptor=} [properties] Properties to set + * @returns {google.api.ResourceDescriptor} ResourceDescriptor instance */ - GeneratedCodeInfo.create = function create(properties) { - return new GeneratedCodeInfo(properties); + ResourceDescriptor.create = function create(properties) { + return new ResourceDescriptor(properties); }; /** - * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.GeneratedCodeInfo + * @memberof google.api.ResourceDescriptor * @static - * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - GeneratedCodeInfo.encode = function encode(message, writer) { + ResourceDescriptor.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.annotation != null && message.annotation.length) - for (var i = 0; i < message.annotation.length; ++i) - $root.google.protobuf.GeneratedCodeInfo.Annotation.encode(message.annotation[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.pattern != null && message.pattern.length) + for (var i = 0; i < message.pattern.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); + if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); + if (message.history != null && Object.hasOwnProperty.call(message, "history")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); + if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); + if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); + if (message.style != null && message.style.length) { + writer.uint32(/* id 10, wireType 2 =*/82).fork(); + for (var i = 0; i < message.style.length; ++i) + writer.int32(message.style[i]); + writer.ldelim(); + } return 
writer; }; /** - * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo + * @memberof google.api.ResourceDescriptor * @static - * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - GeneratedCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + ResourceDescriptor.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * Decodes a ResourceDescriptor message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.GeneratedCodeInfo + * @memberof google.api.ResourceDescriptor * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + * @returns {google.api.ResourceDescriptor} ResourceDescriptor * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - GeneratedCodeInfo.decode = function decode(reader, length) { + ResourceDescriptor.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - if (!(message.annotation && message.annotation.length)) - message.annotation = []; - message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); + message.type = reader.string(); + break; + case 2: + if (!(message.pattern && message.pattern.length)) + message.pattern = []; + message.pattern.push(reader.string()); + break; + case 3: + message.nameField = reader.string(); + break; + case 4: + message.history = reader.int32(); + break; + case 5: + message.plural = reader.string(); + break; + case 6: + message.singular = reader.string(); + break; + case 10: + if (!(message.style && message.style.length)) + message.style = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.style.push(reader.int32()); + } else + message.style.push(reader.int32()); break; default: reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - GeneratedCodeInfo.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a GeneratedCodeInfo message. 
- * @function verify - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - GeneratedCodeInfo.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.annotation != null && message.hasOwnProperty("annotation")) { - if (!Array.isArray(message.annotation)) - return "annotation: array expected"; - for (var i = 0; i < message.annotation.length; ++i) { - var error = $root.google.protobuf.GeneratedCodeInfo.Annotation.verify(message.annotation[i]); - if (error) - return "annotation." + error; - } - } - return null; - }; - - /** - * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo - */ - GeneratedCodeInfo.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.GeneratedCodeInfo) - return object; - var message = new $root.google.protobuf.GeneratedCodeInfo(); - if (object.annotation) { - if (!Array.isArray(object.annotation)) - throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: array expected"); - message.annotation = []; - for (var i = 0; i < object.annotation.length; ++i) { - if (typeof object.annotation[i] !== "object") - throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: object expected"); - message.annotation[i] = $root.google.protobuf.GeneratedCodeInfo.Annotation.fromObject(object.annotation[i]); + break; } } return message; }; /** - * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.GeneratedCodeInfo + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.ResourceDescriptor * @static - * @param {google.protobuf.GeneratedCodeInfo} message GeneratedCodeInfo - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - GeneratedCodeInfo.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.annotation = []; - if (message.annotation && message.annotation.length) { - object.annotation = []; - for (var j = 0; j < message.annotation.length; ++j) - object.annotation[j] = $root.google.protobuf.GeneratedCodeInfo.Annotation.toObject(message.annotation[j], options); - } - return object; + ResourceDescriptor.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); }; /** - * Converts this GeneratedCodeInfo to JSON. - * @function toJSON - * @memberof google.protobuf.GeneratedCodeInfo - * @instance - * @returns {Object.} JSON object + * Verifies a ResourceDescriptor message. 
+ * @function verify + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - GeneratedCodeInfo.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - GeneratedCodeInfo.Annotation = (function() { - - /** - * Properties of an Annotation. - * @memberof google.protobuf.GeneratedCodeInfo - * @interface IAnnotation - * @property {Array.|null} [path] Annotation path - * @property {string|null} [sourceFile] Annotation sourceFile - * @property {number|null} [begin] Annotation begin - * @property {number|null} [end] Annotation end - */ - - /** - * Constructs a new Annotation. - * @memberof google.protobuf.GeneratedCodeInfo - * @classdesc Represents an Annotation. - * @implements IAnnotation - * @constructor - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set - */ - function Annotation(properties) { - this.path = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; + ResourceDescriptor.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.pattern != null && message.hasOwnProperty("pattern")) { + if (!Array.isArray(message.pattern)) + return "pattern: array expected"; + for (var i = 0; i < message.pattern.length; ++i) + if (!$util.isString(message.pattern[i])) + return "pattern: string[] expected"; } - - /** - * Annotation path. - * @member {Array.} path - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.path = $util.emptyArray; - - /** - * Annotation sourceFile. 
- * @member {string} sourceFile - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.sourceFile = ""; - - /** - * Annotation begin. - * @member {number} begin - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.begin = 0; - - /** - * Annotation end. - * @member {number} end - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.end = 0; - - /** - * Creates a new Annotation instance using the specified properties. - * @function create - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation instance - */ - Annotation.create = function create(properties) { - return new Annotation(properties); - }; - - /** - * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Annotation.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.path != null && message.path.length) { - writer.uint32(/* id 1, wireType 2 =*/10).fork(); - for (var i = 0; i < message.path.length; ++i) - writer.int32(message.path[i]); - writer.ldelim(); + if (message.nameField != null && message.hasOwnProperty("nameField")) + if (!$util.isString(message.nameField)) + return "nameField: string expected"; + if (message.history != null && message.hasOwnProperty("history")) + switch (message.history) { + default: + return "history: enum value expected"; + case 0: + case 1: + case 2: + break; } - if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); - if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); - return writer; - }; - - /** - * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Annotation.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Annotation message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Annotation.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { + if (message.plural != null && message.hasOwnProperty("plural")) + if (!$util.isString(message.plural)) + return "plural: string expected"; + if (message.singular != null && message.hasOwnProperty("singular")) + if (!$util.isString(message.singular)) + return "singular: string expected"; + if (message.style != null && message.hasOwnProperty("style")) { + if (!Array.isArray(message.style)) + return "style: array expected"; + for (var i = 0; i < message.style.length; ++i) + switch (message.style[i]) { + default: + return "style: enum value[] expected"; + case 0: case 1: - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - case 2: - message.sourceFile = reader.string(); - break; - case 3: - message.begin = reader.int32(); - break; - case 4: - message.end = reader.int32(); break; + } + } + return null; + }; + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} object Plain object + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + */ + ResourceDescriptor.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceDescriptor) + return object; + var message = new $root.google.api.ResourceDescriptor(); + if (object.type != null) + message.type = String(object.type); + if (object.pattern) { + if (!Array.isArray(object.pattern)) + throw TypeError(".google.api.ResourceDescriptor.pattern: array expected"); + message.pattern = []; + for (var i = 0; i < object.pattern.length; ++i) + message.pattern[i] = String(object.pattern[i]); + } + if (object.nameField != null) + message.nameField = String(object.nameField); + switch (object.history) { + case "HISTORY_UNSPECIFIED": + case 0: + message.history = 0; + break; + case "ORIGINALLY_SINGLE_PATTERN": + case 1: + message.history = 1; + break; + case "FUTURE_MULTI_PATTERN": + case 2: + message.history = 2; + break; + } + if (object.plural != null) + message.plural = String(object.plural); + if (object.singular != null) + message.singular = String(object.singular); + if (object.style) { + if (!Array.isArray(object.style)) + throw TypeError(".google.api.ResourceDescriptor.style: array expected"); + message.style = []; + for (var i = 0; i < object.style.length; ++i) + switch (object.style[i]) { default: - reader.skipType(tag & 7); + case "STYLE_UNSPECIFIED": + case 0: + message.style[i] = 0; + break; + case "DECLARATIVE_FRIENDLY": + case 1: + message.style[i] = 1; break; } - } - return message; - }; - - /** - * Decodes an Annotation message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Annotation.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Annotation message. - * @function verify - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Annotation.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.path != null && message.hasOwnProperty("path")) { - if (!Array.isArray(message.path)) - return "path: array expected"; - for (var i = 0; i < message.path.length; ++i) - if (!$util.isInteger(message.path[i])) - return "path: integer[] expected"; - } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) - if (!$util.isString(message.sourceFile)) - return "sourceFile: string expected"; - if (message.begin != null && message.hasOwnProperty("begin")) - if (!$util.isInteger(message.begin)) - return "begin: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - return null; - }; + } + return message; + }; - /** - * Creates an Annotation message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - */ - Annotation.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.GeneratedCodeInfo.Annotation) - return object; - var message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); - if (object.path) { - if (!Array.isArray(object.path)) - throw TypeError(".google.protobuf.GeneratedCodeInfo.Annotation.path: array expected"); - message.path = []; - for (var i = 0; i < object.path.length; ++i) - message.path[i] = object.path[i] | 0; - } - if (object.sourceFile != null) - message.sourceFile = String(object.sourceFile); - if (object.begin != null) - message.begin = object.begin | 0; - if (object.end != null) - message.end = object.end | 0; - return message; - }; + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.ResourceDescriptor} message ResourceDescriptor + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ResourceDescriptor.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.pattern = []; + object.style = []; + } + if (options.defaults) { + object.type = ""; + object.nameField = ""; + object.history = options.enums === String ? 
"HISTORY_UNSPECIFIED" : 0; + object.plural = ""; + object.singular = ""; + } + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.pattern && message.pattern.length) { + object.pattern = []; + for (var j = 0; j < message.pattern.length; ++j) + object.pattern[j] = message.pattern[j]; + } + if (message.nameField != null && message.hasOwnProperty("nameField")) + object.nameField = message.nameField; + if (message.history != null && message.hasOwnProperty("history")) + object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] : message.history; + if (message.plural != null && message.hasOwnProperty("plural")) + object.plural = message.plural; + if (message.singular != null && message.hasOwnProperty("singular")) + object.singular = message.singular; + if (message.style && message.style.length) { + object.style = []; + for (var j = 0; j < message.style.length; ++j) + object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; + } + return object; + }; - /** - * Creates a plain object from an Annotation message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.Annotation} message Annotation - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Annotation.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.path = []; - if (options.defaults) { - object.sourceFile = ""; - object.begin = 0; - object.end = 0; - } - if (message.path && message.path.length) { - object.path = []; - for (var j = 0; j < message.path.length; ++j) - object.path[j] = message.path[j]; - } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) - object.sourceFile = message.sourceFile; - if (message.begin != null && message.hasOwnProperty("begin")) - object.begin = message.begin; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - return object; - }; + /** + * Converts this ResourceDescriptor to JSON. + * @function toJSON + * @memberof google.api.ResourceDescriptor + * @instance + * @returns {Object.} JSON object + */ + ResourceDescriptor.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Converts this Annotation to JSON. - * @function toJSON - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - * @returns {Object.} JSON object - */ - Annotation.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * History enum. 
+ * @name google.api.ResourceDescriptor.History + * @enum {number} + * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value + * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value + * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value + */ + ResourceDescriptor.History = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "HISTORY_UNSPECIFIED"] = 0; + values[valuesById[1] = "ORIGINALLY_SINGLE_PATTERN"] = 1; + values[valuesById[2] = "FUTURE_MULTI_PATTERN"] = 2; + return values; + })(); - return Annotation; + /** + * Style enum. + * @name google.api.ResourceDescriptor.Style + * @enum {number} + * @property {number} STYLE_UNSPECIFIED=0 STYLE_UNSPECIFIED value + * @property {number} DECLARATIVE_FRIENDLY=1 DECLARATIVE_FRIENDLY value + */ + ResourceDescriptor.Style = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STYLE_UNSPECIFIED"] = 0; + values[valuesById[1] = "DECLARATIVE_FRIENDLY"] = 1; + return values; })(); - return GeneratedCodeInfo; + return ResourceDescriptor; })(); - protobuf.Timestamp = (function() { + api.ResourceReference = (function() { /** - * Properties of a Timestamp. - * @memberof google.protobuf - * @interface ITimestamp - * @property {number|Long|null} [seconds] Timestamp seconds - * @property {number|null} [nanos] Timestamp nanos + * Properties of a ResourceReference. + * @memberof google.api + * @interface IResourceReference + * @property {string|null} [type] ResourceReference type + * @property {string|null} [childType] ResourceReference childType */ - /** - * Constructs a new Timestamp. - * @memberof google.protobuf - * @classdesc Represents a Timestamp. - * @implements ITimestamp + /** + * Constructs a new ResourceReference. + * @memberof google.api + * @classdesc Represents a ResourceReference. 
+ * @implements IResourceReference * @constructor - * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @param {google.api.IResourceReference=} [properties] Properties to set */ - function Timestamp(properties) { + function ResourceReference(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -19848,88 +26642,88 @@ } /** - * Timestamp seconds. - * @member {number|Long} seconds - * @memberof google.protobuf.Timestamp + * ResourceReference type. + * @member {string} type + * @memberof google.api.ResourceReference * @instance */ - Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + ResourceReference.prototype.type = ""; /** - * Timestamp nanos. - * @member {number} nanos - * @memberof google.protobuf.Timestamp + * ResourceReference childType. + * @member {string} childType + * @memberof google.api.ResourceReference * @instance */ - Timestamp.prototype.nanos = 0; + ResourceReference.prototype.childType = ""; /** - * Creates a new Timestamp instance using the specified properties. + * Creates a new ResourceReference instance using the specified properties. * @function create - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - * @returns {google.protobuf.Timestamp} Timestamp instance + * @param {google.api.IResourceReference=} [properties] Properties to set + * @returns {google.api.ResourceReference} ResourceReference instance */ - Timestamp.create = function create(properties) { - return new Timestamp(properties); + ResourceReference.create = function create(properties) { + return new ResourceReference(properties); }; /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * Encodes the specified ResourceReference message. 
Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. * @function encode - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Timestamp.encode = function encode(message, writer) { + ResourceReference.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); return writer; }; /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. 
* @function encodeDelimited - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + ResourceReference.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a Timestamp message from the specified reader or buffer. + * Decodes a ResourceReference message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.api.ResourceReference} ResourceReference * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Timestamp.decode = function decode(reader, length) { + ResourceReference.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: - message.seconds = reader.int64(); + message.type = reader.string(); break; case 2: - message.nanos = reader.int32(); + message.childType = reader.string(); break; default: reader.skipType(tag & 7); @@ -19940,129 +26734,131 @@ }; /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.api.ResourceReference} ResourceReference * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Timestamp.decodeDelimited = function decodeDelimited(reader) { + ResourceReference.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a Timestamp message. + * Verifies a ResourceReference message. 
* @function verify - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Timestamp.verify = function verify(message) { + ResourceReference.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) - return "seconds: integer|Long expected"; - if (message.nanos != null && message.hasOwnProperty("nanos")) - if (!$util.isInteger(message.nanos)) - return "nanos: integer expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.childType != null && message.hasOwnProperty("childType")) + if (!$util.isString(message.childType)) + return "childType: string expected"; return null; }; /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static * @param {Object.} object Plain object - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.api.ResourceReference} ResourceReference */ - Timestamp.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Timestamp) + ResourceReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceReference) return object; - var message = new $root.google.protobuf.Timestamp(); - if (object.seconds != null) - if ($util.Long) - (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; - else if (typeof object.seconds === "string") - message.seconds = parseInt(object.seconds, 10); - else if (typeof object.seconds === "number") - message.seconds = object.seconds; - else if (typeof object.seconds === "object") - message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); - if (object.nanos != null) - message.nanos = object.nanos | 0; + var message = new $root.google.api.ResourceReference(); + if (object.type != null) + message.type = String(object.type); + if (object.childType != null) + message.childType = String(object.childType); return message; }; /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @static - * @param {google.protobuf.Timestamp} message Timestamp + * @param {google.api.ResourceReference} message ResourceReference * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - Timestamp.toObject = function toObject(message, options) { + ResourceReference.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; if (options.defaults) { - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.seconds = options.longs === String ? "0" : 0; - object.nanos = 0; + object.type = ""; + object.childType = ""; } - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (typeof message.seconds === "number") - object.seconds = options.longs === String ? String(message.seconds) : message.seconds; - else - object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; - if (message.nanos != null && message.hasOwnProperty("nanos")) - object.nanos = message.nanos; + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.childType != null && message.hasOwnProperty("childType")) + object.childType = message.childType; return object; }; /** - * Converts this Timestamp to JSON. + * Converts this ResourceReference to JSON. 
* @function toJSON - * @memberof google.protobuf.Timestamp + * @memberof google.api.ResourceReference * @instance * @returns {Object.} JSON object */ - Timestamp.prototype.toJSON = function toJSON() { + ResourceReference.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return Timestamp; + return ResourceReference; })(); - protobuf.Empty = (function() { + return api; + })(); + + google.rpc = (function() { + + /** + * Namespace rpc. + * @memberof google + * @namespace + */ + var rpc = {}; + + rpc.Status = (function() { /** - * Properties of an Empty. - * @memberof google.protobuf - * @interface IEmpty + * Properties of a Status. + * @memberof google.rpc + * @interface IStatus + * @property {number|null} [code] Status code + * @property {string|null} [message] Status message + * @property {Array.|null} [details] Status details */ /** - * Constructs a new Empty. - * @memberof google.protobuf - * @classdesc Represents an Empty. - * @implements IEmpty + * Constructs a new Status. + * @memberof google.rpc + * @classdesc Represents a Status. + * @implements IStatus * @constructor - * @param {google.protobuf.IEmpty=} [properties] Properties to set + * @param {google.rpc.IStatus=} [properties] Properties to set */ - function Empty(properties) { + function Status(properties) { + this.details = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -20070,63 +26866,105 @@ } /** - * Creates a new Empty instance using the specified properties. + * Status code. + * @member {number} code + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.code = 0; + + /** + * Status message. + * @member {string} message + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.message = ""; + + /** + * Status details. 
+ * @member {Array.} details + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.details = $util.emptyArray; + + /** + * Creates a new Status instance using the specified properties. * @function create - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static - * @param {google.protobuf.IEmpty=} [properties] Properties to set - * @returns {google.protobuf.Empty} Empty instance + * @param {google.rpc.IStatus=} [properties] Properties to set + * @returns {google.rpc.Status} Status instance */ - Empty.create = function create(properties) { - return new Empty(properties); + Status.create = function create(properties) { + return new Status(properties); }; /** - * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. * @function encode - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static - * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {google.rpc.IStatus} message Status message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Empty.encode = function encode(message, writer) { + Status.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); + if (message.message != null && Object.hasOwnProperty.call(message, "message")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.message); + if (message.details != null && message.details.length) + for (var i = 0; i < message.details.length; ++i) + $root.google.protobuf.Any.encode(message.details[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; /** - * Encodes the specified Empty 
message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static - * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {google.rpc.IStatus} message Status message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Empty.encodeDelimited = function encodeDelimited(message, writer) { + Status.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes an Empty message from the specified reader or buffer. + * Decodes a Status message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Empty} Empty + * @returns {google.rpc.Status} Status * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Empty.decode = function decode(reader, length) { + Status.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.rpc.Status(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { + case 1: + message.code = reader.int32(); + break; + case 2: + message.message = reader.string(); + break; + case 3: + if (!(message.details && message.details.length)) + message.details = []; + message.details.push($root.google.protobuf.Any.decode(reader, reader.uint32())); + break; default: reader.skipType(tag & 7); break; @@ -20136,77 +26974,125 @@ }; /** - * Decodes an Empty message from the specified reader or buffer, length delimited. + * Decodes a Status message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Empty} Empty + * @returns {google.rpc.Status} Status * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Empty.decodeDelimited = function decodeDelimited(reader) { + Status.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies an Empty message. + * Verifies a Status message. 
* @function verify - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Empty.verify = function verify(message) { + Status.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + if (message.code != null && message.hasOwnProperty("code")) + if (!$util.isInteger(message.code)) + return "code: integer expected"; + if (message.message != null && message.hasOwnProperty("message")) + if (!$util.isString(message.message)) + return "message: string expected"; + if (message.details != null && message.hasOwnProperty("details")) { + if (!Array.isArray(message.details)) + return "details: array expected"; + for (var i = 0; i < message.details.length; ++i) { + var error = $root.google.protobuf.Any.verify(message.details[i]); + if (error) + return "details." + error; + } + } return null; }; /** - * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * Creates a Status message from a plain object. Also converts values to their respective internal types. 
* @function fromObject - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static * @param {Object.} object Plain object - * @returns {google.protobuf.Empty} Empty + * @returns {google.rpc.Status} Status */ - Empty.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Empty) + Status.fromObject = function fromObject(object) { + if (object instanceof $root.google.rpc.Status) return object; - return new $root.google.protobuf.Empty(); + var message = new $root.google.rpc.Status(); + if (object.code != null) + message.code = object.code | 0; + if (object.message != null) + message.message = String(object.message); + if (object.details) { + if (!Array.isArray(object.details)) + throw TypeError(".google.rpc.Status.details: array expected"); + message.details = []; + for (var i = 0; i < object.details.length; ++i) { + if (typeof object.details[i] !== "object") + throw TypeError(".google.rpc.Status.details: object expected"); + message.details[i] = $root.google.protobuf.Any.fromObject(object.details[i]); + } + } + return message; }; /** - * Creates a plain object from an Empty message. Also converts values to other types if specified. + * Creates a plain object from a Status message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @static - * @param {google.protobuf.Empty} message Empty + * @param {google.rpc.Status} message Status * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - Empty.toObject = function toObject() { - return {}; + Status.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.details = []; + if (options.defaults) { + object.code = 0; + object.message = ""; + } + if (message.code != null && message.hasOwnProperty("code")) + object.code = message.code; + if (message.message != null && message.hasOwnProperty("message")) + object.message = message.message; + if (message.details && message.details.length) { + object.details = []; + for (var j = 0; j < message.details.length; ++j) + object.details[j] = $root.google.protobuf.Any.toObject(message.details[j], options); + } + return object; }; /** - * Converts this Empty to JSON. + * Converts this Status to JSON. 
* @function toJSON - * @memberof google.protobuf.Empty + * @memberof google.rpc.Status * @instance * @returns {Object.} JSON object */ - Empty.prototype.toJSON = function toJSON() { + Status.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; - return Empty; + return Status; })(); - return protobuf; + return rpc; })(); return google; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index b3dca4ec481..7070197fe3b 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -13,7 +13,7 @@ "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", "java_multiple_files": true, - "java_outer_classname": "StreamProto", + "java_outer_classname": "TableProto", "java_package": "com.google.cloud.bigquery.storage.v1", "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1", "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", @@ -77,6 +77,23 @@ } } }, + "ProtoSchema": { + "fields": { + "protoDescriptor": { + "type": "google.protobuf.DescriptorProto", + "id": 1 + } + } + }, + "ProtoRows": { + "fields": { + "serializedRows": { + "rule": "repeated", + "type": "bytes", + "id": 1 + } + } + }, "BigQueryRead": { "options": { "(google.api.default_host)": "bigquerystorage.googleapis.com", @@ -138,6 +155,134 @@ } } }, + "BigQueryWrite": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateWriteStream": { + "requestType": "CreateWriteStreamRequest", + "responseType": "WriteStream", + "options": { + "(google.api.http).post": "/v1/{parent=projects/*/datasets/*/tables/*}", + 
"(google.api.http).body": "write_stream", + "(google.api.method_signature)": "parent,write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{parent=projects/*/datasets/*/tables/*}", + "body": "write_stream" + } + }, + { + "(google.api.method_signature)": "parent,write_stream" + } + ] + }, + "AppendRows": { + "requestType": "AppendRowsRequest", + "requestStream": true, + "responseType": "AppendRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "write_stream" + } + ] + }, + "GetWriteStream": { + "requestType": "GetWriteStreamRequest", + "responseType": "WriteStream", + "options": { + "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "name" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "name" + } + ] + }, + "FinalizeWriteStream": { + "requestType": "FinalizeWriteStreamRequest", + "responseType": "FinalizeWriteStreamResponse", + "options": { + "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "name" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "name" + } + ] + }, + "BatchCommitWriteStreams": { + "requestType": "BatchCommitWriteStreamsRequest", + "responseType": "BatchCommitWriteStreamsResponse", + "options": { + 
"(google.api.http).get": "/v1/{parent=projects/*/datasets/*/tables/*}", + "(google.api.method_signature)": "parent" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{parent=projects/*/datasets/*/tables/*}" + } + }, + { + "(google.api.method_signature)": "parent" + } + ] + }, + "FlushRows": { + "requestType": "FlushRowsRequest", + "responseType": "FlushRowsResponse", + "options": { + "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "write_stream" + } + ] + } + } + }, "CreateReadSessionRequest": { "fields": { "parent": { @@ -287,6 +432,222 @@ } } }, + "CreateWriteStreamRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "writeStream": { + "type": "WriteStream", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "AppendRowsRequest": { + "oneofs": { + "rows": { + "oneof": [ + "protoRows" + ] + } + }, + "fields": { + "writeStream": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + }, + "offset": { + "type": "google.protobuf.Int64Value", + "id": 2 + }, + "protoRows": { + "type": "ProtoData", + "id": 4 + }, + "traceId": { + "type": "string", + "id": 6 + } + }, + "nested": { + "ProtoData": { + "fields": { + "writerSchema": { + "type": "ProtoSchema", + "id": 1 + }, + "rows": { + "type": "ProtoRows", + "id": 2 + } + } + } + } + }, + "AppendRowsResponse": { + "oneofs": { + "response": { + "oneof": [ + "appendResult", 
+ "error" + ] + } + }, + "fields": { + "appendResult": { + "type": "AppendResult", + "id": 1 + }, + "error": { + "type": "google.rpc.Status", + "id": 2 + }, + "updatedSchema": { + "type": "TableSchema", + "id": 3 + } + }, + "nested": { + "AppendResult": { + "fields": { + "offset": { + "type": "google.protobuf.Int64Value", + "id": 1 + } + } + } + } + }, + "GetWriteStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + } + } + }, + "BatchCommitWriteStreamsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "writeStreams": { + "rule": "repeated", + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCommitWriteStreamsResponse": { + "fields": { + "commitTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "streamErrors": { + "rule": "repeated", + "type": "StorageError", + "id": 2 + } + } + }, + "FinalizeWriteStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + } + } + }, + "FinalizeWriteStreamResponse": { + "fields": { + "rowCount": { + "type": "int64", + "id": 1 + } + } + }, + "FlushRowsRequest": { + "fields": { + "writeStream": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + }, + "offset": { + "type": "google.protobuf.Int64Value", + "id": 2 + } + } + }, + "FlushRowsResponse": { + "fields": { + "offset": { + "type": "int64", + "id": 1 + } + } + }, + "StorageError": { + "fields": { + "code": { + "type": "StorageErrorCode", + "id": 1 
+ }, + "entity": { + "type": "string", + "id": 2 + }, + "errorMessage": { + "type": "string", + "id": 3 + } + }, + "nested": { + "StorageErrorCode": { + "values": { + "STORAGE_ERROR_CODE_UNSPECIFIED": 0, + "TABLE_NOT_FOUND": 1, + "STREAM_ALREADY_COMMITTED": 2, + "STREAM_NOT_FOUND": 3, + "INVALID_STREAM_TYPE": 4, + "INVALID_STREAM_STATE": 5, + "STREAM_FINALIZED": 6, + "SCHEMA_MISMATCH_EXTRA_FIELDS": 7 + } + } + } + }, "DataFormat": { "values": { "DATA_FORMAT_UNSPECIFIED": 0, @@ -433,12 +794,165 @@ } } } - } - } - }, - "v1beta1": { - "options": { - "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", + }, + "WriteStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/WriteStream", + "(google.api.resource).pattern": "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "type": { + "type": "Type", + "id": 2, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + }, + "createTime": { + "type": "google.protobuf.Timestamp", + "id": 3, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "commitTime": { + "type": "google.protobuf.Timestamp", + "id": 4, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "tableSchema": { + "type": "TableSchema", + "id": 5, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + }, + "nested": { + "Type": { + "values": { + "TYPE_UNSPECIFIED": 0, + "COMMITTED": 1, + "PENDING": 2, + "BUFFERED": 3 + } + } + } + }, + "TableSchema": { + "fields": { + "fields": { + "rule": "repeated", + "type": "TableFieldSchema", + "id": 1 + } + } + }, + "TableFieldSchema": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "type": { + "type": "Type", + "id": 2, + "options": { + 
"(google.api.field_behavior)": "REQUIRED" + } + }, + "mode": { + "type": "Mode", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "fields": { + "rule": "repeated", + "type": "TableFieldSchema", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "description": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "maxLength": { + "type": "int64", + "id": 7, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "precision": { + "type": "int64", + "id": 8, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "scale": { + "type": "int64", + "id": 9, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + }, + "nested": { + "Type": { + "values": { + "TYPE_UNSPECIFIED": 0, + "STRING": 1, + "INT64": 2, + "DOUBLE": 3, + "STRUCT": 4, + "BYTES": 5, + "BOOL": 6, + "TIMESTAMP": 7, + "DATE": 8, + "TIME": 9, + "DATETIME": 10, + "GEOGRAPHY": 11, + "NUMERIC": 12, + "BIGNUMERIC": 13, + "INTERVAL": 14, + "JSON": 15 + } + }, + "Mode": { + "values": { + "MODE_UNSPECIFIED": 0, + "NULLABLE": 1, + "REQUIRED": 2, + "REPEATED": 3 + } + } + } + } + } + }, + "v1beta1": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", "java_outer_classname": "TableReferenceProto", "java_package": "com.google.cloud.bigquery.storage.v1beta1" }, @@ -913,309 +1427,100 @@ } } }, - "api": { + "protobuf": { "options": { - "go_package": "google.golang.org/genproto/googleapis/api/annotations;annotations", - "java_multiple_files": true, - "java_outer_classname": "ResourceProto", - "java_package": "com.google.api", - "objc_class_prefix": "GAPI", - "cc_enable_arenas": true + "go_package": "google.golang.org/protobuf/types/descriptorpb", + "java_package": "com.google.protobuf", + "java_outer_classname": "DescriptorProtos", + "csharp_namespace": "Google.Protobuf.Reflection", + "objc_class_prefix": "GPB", + 
"cc_enable_arenas": true, + "optimize_for": "SPEED" }, "nested": { - "http": { - "type": "HttpRule", - "id": 72295728, - "extend": "google.protobuf.MethodOptions" - }, - "Http": { + "FileDescriptorSet": { "fields": { - "rules": { + "file": { "rule": "repeated", - "type": "HttpRule", + "type": "FileDescriptorProto", "id": 1 - }, - "fullyDecodeReservedExpansion": { - "type": "bool", - "id": 2 } } }, - "HttpRule": { - "oneofs": { - "pattern": { - "oneof": [ - "get", - "put", - "post", - "delete", - "patch", - "custom" - ] - } - }, + "FileDescriptorProto": { "fields": { - "selector": { + "name": { "type": "string", "id": 1 }, - "get": { + "package": { "type": "string", "id": 2 }, - "put": { + "dependency": { + "rule": "repeated", "type": "string", "id": 3 }, - "post": { - "type": "string", + "publicDependency": { + "rule": "repeated", + "type": "int32", + "id": 10, + "options": { + "packed": false + } + }, + "weakDependency": { + "rule": "repeated", + "type": "int32", + "id": 11, + "options": { + "packed": false + } + }, + "messageType": { + "rule": "repeated", + "type": "DescriptorProto", "id": 4 }, - "delete": { - "type": "string", + "enumType": { + "rule": "repeated", + "type": "EnumDescriptorProto", "id": 5 }, - "patch": { - "type": "string", + "service": { + "rule": "repeated", + "type": "ServiceDescriptorProto", "id": 6 }, - "custom": { - "type": "CustomHttpPattern", + "extension": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 7 + }, + "options": { + "type": "FileOptions", "id": 8 }, - "body": { - "type": "string", - "id": 7 + "sourceCodeInfo": { + "type": "SourceCodeInfo", + "id": 9 }, - "responseBody": { + "syntax": { "type": "string", "id": 12 - }, - "additionalBindings": { - "rule": "repeated", - "type": "HttpRule", - "id": 11 } } }, - "CustomHttpPattern": { + "DescriptorProto": { "fields": { - "kind": { + "name": { "type": "string", "id": 1 }, - "path": { - "type": "string", - "id": 2 - } - } - }, - "methodSignature": { - "rule": 
"repeated", - "type": "string", - "id": 1051, - "extend": "google.protobuf.MethodOptions" - }, - "defaultHost": { - "type": "string", - "id": 1049, - "extend": "google.protobuf.ServiceOptions" - }, - "oauthScopes": { - "type": "string", - "id": 1050, - "extend": "google.protobuf.ServiceOptions" - }, - "fieldBehavior": { - "rule": "repeated", - "type": "google.api.FieldBehavior", - "id": 1052, - "extend": "google.protobuf.FieldOptions" - }, - "FieldBehavior": { - "values": { - "FIELD_BEHAVIOR_UNSPECIFIED": 0, - "OPTIONAL": 1, - "REQUIRED": 2, - "OUTPUT_ONLY": 3, - "INPUT_ONLY": 4, - "IMMUTABLE": 5, - "UNORDERED_LIST": 6 - } - }, - "resourceReference": { - "type": "google.api.ResourceReference", - "id": 1055, - "extend": "google.protobuf.FieldOptions" - }, - "resourceDefinition": { - "rule": "repeated", - "type": "google.api.ResourceDescriptor", - "id": 1053, - "extend": "google.protobuf.FileOptions" - }, - "resource": { - "type": "google.api.ResourceDescriptor", - "id": 1053, - "extend": "google.protobuf.MessageOptions" - }, - "ResourceDescriptor": { - "fields": { - "type": { - "type": "string", - "id": 1 - }, - "pattern": { - "rule": "repeated", - "type": "string", - "id": 2 - }, - "nameField": { - "type": "string", - "id": 3 - }, - "history": { - "type": "History", - "id": 4 - }, - "plural": { - "type": "string", - "id": 5 - }, - "singular": { - "type": "string", - "id": 6 - }, - "style": { - "rule": "repeated", - "type": "Style", - "id": 10 - } - }, - "nested": { - "History": { - "values": { - "HISTORY_UNSPECIFIED": 0, - "ORIGINALLY_SINGLE_PATTERN": 1, - "FUTURE_MULTI_PATTERN": 2 - } - }, - "Style": { - "values": { - "STYLE_UNSPECIFIED": 0, - "DECLARATIVE_FRIENDLY": 1 - } - } - } - }, - "ResourceReference": { - "fields": { - "type": { - "type": "string", - "id": 1 - }, - "childType": { - "type": "string", - "id": 2 - } - } - } - } - }, - "protobuf": { - "options": { - "go_package": "google.golang.org/protobuf/types/descriptorpb", - "java_package": 
"com.google.protobuf", - "java_outer_classname": "DescriptorProtos", - "csharp_namespace": "Google.Protobuf.Reflection", - "objc_class_prefix": "GPB", - "cc_enable_arenas": true, - "optimize_for": "SPEED" - }, - "nested": { - "FileDescriptorSet": { - "fields": { - "file": { - "rule": "repeated", - "type": "FileDescriptorProto", - "id": 1 - } - } - }, - "FileDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "package": { - "type": "string", - "id": 2 - }, - "dependency": { - "rule": "repeated", - "type": "string", - "id": 3 - }, - "publicDependency": { - "rule": "repeated", - "type": "int32", - "id": 10, - "options": { - "packed": false - } - }, - "weakDependency": { - "rule": "repeated", - "type": "int32", - "id": 11, - "options": { - "packed": false - } - }, - "messageType": { - "rule": "repeated", - "type": "DescriptorProto", - "id": 4 - }, - "enumType": { - "rule": "repeated", - "type": "EnumDescriptorProto", - "id": 5 - }, - "service": { - "rule": "repeated", - "type": "ServiceDescriptorProto", - "id": 6 - }, - "extension": { - "rule": "repeated", - "type": "FieldDescriptorProto", - "id": 7 - }, - "options": { - "type": "FileOptions", - "id": 8 - }, - "sourceCodeInfo": { - "type": "SourceCodeInfo", - "id": 9 - }, - "syntax": { - "type": "string", - "id": 12 - } - } - }, - "DescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "field": { - "rule": "repeated", - "type": "FieldDescriptorProto", + "field": { + "rule": "repeated", + "type": "FieldDescriptorProto", "id": 2 }, "extension": { @@ -2029,10 +2334,332 @@ } } }, + "DoubleValue": { + "fields": { + "value": { + "type": "double", + "id": 1 + } + } + }, + "FloatValue": { + "fields": { + "value": { + "type": "float", + "id": 1 + } + } + }, + "Int64Value": { + "fields": { + "value": { + "type": "int64", + "id": 1 + } + } + }, + "UInt64Value": { + "fields": { + "value": { + "type": "uint64", + "id": 1 + } + } + }, + "Int32Value": { + "fields": { + 
"value": { + "type": "int32", + "id": 1 + } + } + }, + "UInt32Value": { + "fields": { + "value": { + "type": "uint32", + "id": 1 + } + } + }, + "BoolValue": { + "fields": { + "value": { + "type": "bool", + "id": 1 + } + } + }, + "StringValue": { + "fields": { + "value": { + "type": "string", + "id": 1 + } + } + }, + "BytesValue": { + "fields": { + "value": { + "type": "bytes", + "id": 1 + } + } + }, + "Any": { + "fields": { + "type_url": { + "type": "string", + "id": 1 + }, + "value": { + "type": "bytes", + "id": 2 + } + } + }, "Empty": { "fields": {} } } + }, + "api": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/api/annotations;annotations", + "java_multiple_files": true, + "java_outer_classname": "ResourceProto", + "java_package": "com.google.api", + "objc_class_prefix": "GAPI", + "cc_enable_arenas": true + }, + "nested": { + "http": { + "type": "HttpRule", + "id": 72295728, + "extend": "google.protobuf.MethodOptions" + }, + "Http": { + "fields": { + "rules": { + "rule": "repeated", + "type": "HttpRule", + "id": 1 + }, + "fullyDecodeReservedExpansion": { + "type": "bool", + "id": 2 + } + } + }, + "HttpRule": { + "oneofs": { + "pattern": { + "oneof": [ + "get", + "put", + "post", + "delete", + "patch", + "custom" + ] + } + }, + "fields": { + "selector": { + "type": "string", + "id": 1 + }, + "get": { + "type": "string", + "id": 2 + }, + "put": { + "type": "string", + "id": 3 + }, + "post": { + "type": "string", + "id": 4 + }, + "delete": { + "type": "string", + "id": 5 + }, + "patch": { + "type": "string", + "id": 6 + }, + "custom": { + "type": "CustomHttpPattern", + "id": 8 + }, + "body": { + "type": "string", + "id": 7 + }, + "responseBody": { + "type": "string", + "id": 12 + }, + "additionalBindings": { + "rule": "repeated", + "type": "HttpRule", + "id": 11 + } + } + }, + "CustomHttpPattern": { + "fields": { + "kind": { + "type": "string", + "id": 1 + }, + "path": { + "type": "string", + "id": 2 + } + } + }, + "methodSignature": { + 
"rule": "repeated", + "type": "string", + "id": 1051, + "extend": "google.protobuf.MethodOptions" + }, + "defaultHost": { + "type": "string", + "id": 1049, + "extend": "google.protobuf.ServiceOptions" + }, + "oauthScopes": { + "type": "string", + "id": 1050, + "extend": "google.protobuf.ServiceOptions" + }, + "fieldBehavior": { + "rule": "repeated", + "type": "google.api.FieldBehavior", + "id": 1052, + "extend": "google.protobuf.FieldOptions" + }, + "FieldBehavior": { + "values": { + "FIELD_BEHAVIOR_UNSPECIFIED": 0, + "OPTIONAL": 1, + "REQUIRED": 2, + "OUTPUT_ONLY": 3, + "INPUT_ONLY": 4, + "IMMUTABLE": 5, + "UNORDERED_LIST": 6 + } + }, + "resourceReference": { + "type": "google.api.ResourceReference", + "id": 1055, + "extend": "google.protobuf.FieldOptions" + }, + "resourceDefinition": { + "rule": "repeated", + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.FileOptions" + }, + "resource": { + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.MessageOptions" + }, + "ResourceDescriptor": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "pattern": { + "rule": "repeated", + "type": "string", + "id": 2 + }, + "nameField": { + "type": "string", + "id": 3 + }, + "history": { + "type": "History", + "id": 4 + }, + "plural": { + "type": "string", + "id": 5 + }, + "singular": { + "type": "string", + "id": 6 + }, + "style": { + "rule": "repeated", + "type": "Style", + "id": 10 + } + }, + "nested": { + "History": { + "values": { + "HISTORY_UNSPECIFIED": 0, + "ORIGINALLY_SINGLE_PATTERN": 1, + "FUTURE_MULTI_PATTERN": 2 + } + }, + "Style": { + "values": { + "STYLE_UNSPECIFIED": 0, + "DECLARATIVE_FRIENDLY": 1 + } + } + } + }, + "ResourceReference": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "childType": { + "type": "string", + "id": 2 + } + } + } + } + }, + "rpc": { + "options": { + "cc_enable_arenas": true, + "go_package": 
"google.golang.org/genproto/googleapis/rpc/status;status", + "java_multiple_files": true, + "java_outer_classname": "StatusProto", + "java_package": "com.google.rpc", + "objc_class_prefix": "RPC" + }, + "nested": { + "Status": { + "fields": { + "code": { + "type": "int32", + "id": 1 + }, + "message": { + "type": "string", + "id": 2 + }, + "details": { + "rule": "repeated", + "type": "google.protobuf.Any", + "id": 3 + } + } + } + } } } } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index a722be8fec6..10f28e88015 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -166,6 +166,12 @@ export class BigQueryReadClient { readStreamPathTemplate: new this._gaxModule.PathTemplate( 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}' + ), + writeStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + ), }; // Some of the methods on this service provide streaming responses. @@ -754,6 +760,126 @@ export class BigQueryReadClient { .stream; } + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. + */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Return a fully-qualified writeStream resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @param {string} stream + * @returns {string} Resource name string. + */ + writeStreamPath( + project: string, + dataset: string, + table: string, + stream: string + ) { + return this.pathTemplates.writeStreamPathTemplate.render({ + project: project, + dataset: dataset, + table: table, + stream: stream, + }); + } + + /** + * Parse the project from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .project; + } + + /** + * Parse the dataset from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the dataset. 
+ */ + matchDatasetFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .dataset; + } + + /** + * Parse the table from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the table. + */ + matchTableFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .table; + } + + /** + * Parse the stream from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .stream; + } + /** * Terminate the gRPC channel and close the client. * diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json index 42b2735b9fe..f129939b0ef 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json @@ -34,8 +34,7 @@ "retry_params_name": "default" }, "SplitReadStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default" } } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json index 13440ce253e..f0274ac3660 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json +++ b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json @@ -1,6 +1,8 @@ [ "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", "../../protos/google/cloud/bigquery/storage/v1/avro.proto", + 
"../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", "../../protos/google/cloud/bigquery/storage/v1/storage.proto", - "../../protos/google/cloud/bigquery/storage/v1/stream.proto" + "../../protos/google/cloud/bigquery/storage/v1/stream.proto", + "../../protos/google/cloud/bigquery/storage/v1/table.proto" ] diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts new file mode 100644 index 00000000000..62973bfa20a --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -0,0 +1,1183 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; + +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/big_query_write_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ +import * as gapicConfig from './big_query_write_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * BigQuery Write API. + * + * The Write API can be used to write data to BigQuery. + * + * For supplementary information about the Write API, see: + * https://cloud.google.com/bigquery/docs/write-api + * @class + * @memberof v1 + */ +export class BigQueryWriteClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + bigQueryWriteStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryWriteClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. 
+ * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof BigQueryWriteClient; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. 
+ this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + readSessionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}' + ), + writeStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + appendRows: new this._gaxModule.StreamDescriptor( + gax.StreamType.BIDI_STREAMING + ), + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1.BigQueryWrite', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. 
+ * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryWriteStub) { + return this.bigQueryWriteStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1.BigQueryWrite. + this.bigQueryWriteStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1.BigQueryWrite' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryWrite, + this._opts, + this._providedCustomServicePath + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const bigQueryWriteStubMethods = [ + 'createWriteStream', + 'appendRows', + 'getWriteStream', + 'finalizeWriteStream', + 'batchCommitWriteStreams', + 'flushRows', + ]; + for (const methodName of bigQueryWriteStubMethods) { + const callPromise = this.bigQueryWriteStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.bigQueryWriteStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. 
+ * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/bigquery.insertdata', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + createWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + createWriteStream( + request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createWriteStream( + request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | 
undefined, + {} | null | undefined + > + ): void; + /** + * Creates a write stream to the given table. + * Additionally, every table has a special stream named '_default' + * to which data can be written. This stream doesn't need to be created using + * CreateWriteStream. It is a stream that can be used simultaneously by any + * number of clients. Data written to this stream is considered committed as + * soon as an acknowledgement is received. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {google.cloud.bigquery.storage.v1.WriteStream} request.writeStream + * Required. Stream to be created. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.createWriteStream(request); + */ + createWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + parent: request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createWriteStream(request, options, callback); + } + getWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + getWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | 
undefined, + {} | null | undefined + > + ): void; + getWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Gets information about a write stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to get, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.getWriteStream(request); + */ + getWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + name: request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getWriteStream(request, options, callback); + } + finalizeWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + finalizeWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | 
protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + finalizeWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Finalize a write stream so that no new data can be appended to the + * stream. Finalize is not supported on the '_default' stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to finalize, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.finalizeWriteStream(request); + */ + finalizeWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + name: request.name || '', + }); + this.initialize(); + return this.innerApiCalls.finalizeWriteStream(request, options, callback); + } + batchCommitWriteStreams( + request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | undefined + ), + {} | undefined + ] + >; + batchCommitWriteStreams( + request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + options: CallOptions, + callback: Callback< + 
protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCommitWriteStreams( + request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Atomically commits a group of `PENDING` streams that belong to the same + * `parent` table. + * + * Streams must be finalized before commit and cannot be committed multiple + * times. Once a stream is committed, data in the stream becomes available + * for read operations. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Parent table that all the streams should belong to, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {string[]} request.writeStreams + * Required. The group of streams that will be committed atomically. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.batchCommitWriteStreams(request); + */ + batchCommitWriteStreams( + request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + parent: request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.batchCommitWriteStreams( + request, + options, + callback + ); + } + flushRows( + request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, + {} | undefined + ] + >; + flushRows( + request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + options: CallOptions, + callback: Callback< + 
protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + flushRows( + request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Flushes rows to a BUFFERED stream. + * + * If users are appending rows to BUFFERED stream, flush operation is + * required in order for the rows to become available for reading. A + * Flush operation flushes up to any previously flushed offset in a BUFFERED + * stream, to the offset specified in the request. + * + * Flush is not supported on the _default stream, since it is not BUFFERED. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.writeStream + * Required. The stream that is the target of the flush operation. + * @param {google.protobuf.Int64Value} request.offset + * Ending offset of the flush operation. Rows before this offset(including + * this offset) will be flushed. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example + * const [response] = await client.flushRows(request); + */ + flushRows( + request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + gax.routingHeader.fromParams({ + write_stream: request.writeStream || '', + }); + this.initialize(); + return this.innerApiCalls.flushRows(request, options, callback); + } + + /** + * Appends data to the given stream. + * + * If `offset` is specified, the `offset` is checked against the end of + * stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an + * attempt is made to append to an offset beyond the current end of the stream + * or `ALREADY_EXISTS` if user provides an `offset` that has already been + * written to. User can retry with adjusted offset within the same RPC + * connection. If `offset` is not specified, append happens at the end of the + * stream. + * + * The response contains an optional offset at which the append + * happened. 
No offset information will be returned for appends to a + * default stream. + * + * Responses are received in the same order in which requests are sent. + * There will be one response for each successful inserted request. Responses + * may optionally embed error information if the originating AppendRequest was + * not successfully processed. + * + * The specifics of when successfully appended data is made visible to the + * table are governed by the type of stream: + * + * * For COMMITTED streams (which includes the default stream), data is + * visible immediately upon successful append. + * + * * For BUFFERED streams, data is made visible via a subsequent `FlushRows` + * rpc which advances a cursor to a newer offset in the stream. + * + * * For PENDING streams, data is not made visible until the stream itself is + * finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly + * committed via the `BatchCommitWriteStreams` rpc. + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing [AppendRowsRequest]{@link google.cloud.bigquery.storage.v1.AppendRowsRequest} for write() method, and + * will emit objects representing [AppendRowsResponse]{@link google.cloud.bigquery.storage.v1.AppendRowsResponse} on 'data' event asynchronously. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) + * for more details and examples. + * @example + * const stream = client.appendRows(); + * stream.on('data', (response) => { ... }); + * stream.on('end', () => { ... 
}); + * stream.write(request); + * stream.end(); + */ + appendRows(options?: CallOptions): gax.CancellableStream { + this.initialize(); + return this.innerApiCalls.appendRows(options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified readSession resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. + */ + readSessionPath(project: string, location: string, session: string) { + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. + */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string + ) { + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. 
+ */ + matchSessionFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. + */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. + */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Return a fully-qualified writeStream resource name string. 
+ * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @param {string} stream + * @returns {string} Resource name string. + */ + writeStreamPath( + project: string, + dataset: string, + table: string, + stream: string + ) { + return this.pathTemplates.writeStreamPathTemplate.render({ + project: project, + dataset: dataset, + table: table, + stream: stream, + }); + } + + /** + * Parse the project from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .project; + } + + /** + * Parse the dataset from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .dataset; + } + + /** + * Parse the table from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the table. + */ + matchTableFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .table; + } + + /** + * Parse the stream from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .stream; + } + + /** + * Terminate the gRPC channel and close the client. 
+ * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + this.initialize(); + if (!this._terminated) { + return this.bigQueryWriteStub!.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json new file mode 100644 index 00000000000..67eb3165cb6 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json @@ -0,0 +1,59 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1.BigQueryWrite": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateWriteStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AppendRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "GetWriteStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FinalizeWriteStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "BatchCommitWriteStreams": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FlushRows": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git 
a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json new file mode 100644 index 00000000000..f0274ac3660 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1/avro.proto", + "../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", + "../../protos/google/cloud/bigquery/storage/v1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1/stream.proto", + "../../protos/google/cloud/bigquery/storage/v1/table.proto" +] diff --git a/handwritten/bigquery-storage/src/v1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1/gapic_metadata.json index 3c84159f688..f52c2dae123 100644 --- a/handwritten/bigquery-storage/src/v1/gapic_metadata.json +++ b/handwritten/bigquery-storage/src/v1/gapic_metadata.json @@ -43,6 +43,75 @@ } } } + }, + "BigQueryWrite": { + "clients": { + "grpc": { + "libraryClient": "BigQueryWriteClient", + "rpcs": { + "CreateWriteStream": { + "methods": [ + "createWriteStream" + ] + }, + "GetWriteStream": { + "methods": [ + "getWriteStream" + ] + }, + "FinalizeWriteStream": { + "methods": [ + "finalizeWriteStream" + ] + }, + "BatchCommitWriteStreams": { + "methods": [ + "batchCommitWriteStreams" + ] + }, + "FlushRows": { + "methods": [ + "flushRows" + ] + }, + "AppendRows": { + "methods": [ + "appendRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BigQueryWriteClient", + "rpcs": { + "CreateWriteStream": { + "methods": [ + "createWriteStream" + ] + }, + "GetWriteStream": { + "methods": [ + "getWriteStream" + ] + }, + "FinalizeWriteStream": { + "methods": [ + "finalizeWriteStream" + ] + }, + "BatchCommitWriteStreams": { + "methods": [ + "batchCommitWriteStreams" + ] + }, + "FlushRows": { + "methods": [ + "flushRows" + ] + } + } + } + } } } } diff --git 
a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index db2de17014f..e1965026c9e 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -17,3 +17,4 @@ // ** All changes to this file may be overwritten. ** export {BigQueryReadClient} from './big_query_read_client'; +export {BigQueryWriteClient} from './big_query_write_client'; diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 627efaa9f20..0e917c476bd 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -21,6 +21,7 @@ const storage = require('@google-cloud/bigquery-storage'); function main() { const bigQueryReadClient = new storage.BigQueryReadClient(); + const bigQueryWriteClient = new storage.BigQueryWriteClient(); } main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 3b55b605b29..c2e01b6a3a2 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -16,17 +16,26 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. 
** -import {BigQueryReadClient} from '@google-cloud/bigquery-storage'; +import { + BigQueryReadClient, + BigQueryWriteClient, +} from '@google-cloud/bigquery-storage'; // check that the client class type name can be used function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { client.close(); } +function doStuffWithBigQueryWriteClient(client: BigQueryWriteClient) { + client.close(); +} function main() { // check that the client instance can be created const bigQueryReadClient = new BigQueryReadClient(); doStuffWithBigQueryReadClient(bigQueryReadClient); + // check that the client instance can be created + const bigQueryWriteClient = new BigQueryWriteClient(); + doStuffWithBigQueryWriteClient(bigQueryWriteClient); } main(); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index e7feeb4724d..eeb7c0e2cf2 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -648,5 +648,145 @@ describe('v1.BigQueryReadClient', () => { ); }); }); + + describe('table', () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + 
it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('writeStream', () => { + const fakePath = '/rendered/path/writeStream'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + stream: 'streamValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.writeStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.writeStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('writeStreamPath', () => { + const result = client.writeStreamPath( + 'projectValue', + 'datasetValue', + 'tableValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromWriteStreamName', () => { + const result = client.matchProjectFromWriteStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + 
.calledWith(fakePath) + ); + }); + + it('matchDatasetFromWriteStreamName', () => { + const result = client.matchDatasetFromWriteStreamName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromWriteStreamName', () => { + const result = client.matchTableFromWriteStreamName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromWriteStreamName', () => { + const result = client.matchStreamFromWriteStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); }); }); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts new file mode 100644 index 00000000000..e4ba9ae6bdc --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -0,0 +1,1115 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigquerywriteModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubBidiStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + return sinon.stub().returns(mockStream); +} + +describe('v1.BigQueryWriteClient', () => { + it('has servicePath', () => { + const servicePath = bigquerywriteModule.v1.BigQueryWriteClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigquerywriteModule.v1.BigQueryWriteClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryWriteStub, undefined); + await client.initialize(); + assert(client.bigQueryWriteStub); + }); + + it('has close method', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.close(); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, 
fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createWriteStream', () => { + it('invokes createWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.createWriteStream = stubSimpleCall(expectedResponse); + const [response] = await client.createWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes createWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 
'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.createWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes createWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.createWriteStream(request), expectedError); + assert( + (client.innerApiCalls.createWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); 
+ }); + + describe('getWriteStream', () => { + it('invokes getWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.getWriteStream = stubSimpleCall(expectedResponse); + const [response] = await client.getWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes getWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.getWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + ) => { 
+ if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes getWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.getWriteStream(request), expectedError); + assert( + (client.innerApiCalls.getWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('finalizeWriteStream', () => { + it('invokes finalizeWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + ); + 
client.innerApiCalls.finalizeWriteStream = + stubSimpleCall(expectedResponse); + const [response] = await client.finalizeWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.finalizeWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes finalizeWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + ); + client.innerApiCalls.finalizeWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.finalizeWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.finalizeWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes finalizeWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.finalizeWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.finalizeWriteStream(request), expectedError); + assert( + (client.innerApiCalls.finalizeWriteStream as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('batchCommitWriteStreams', () => { + it('invokes batchCommitWriteStreams without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + ); + client.innerApiCalls.batchCommitWriteStreams = + stubSimpleCall(expectedResponse); + const [response] = await client.batchCommitWriteStreams(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.batchCommitWriteStreams as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes batchCommitWriteStreams without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + ); + client.innerApiCalls.batchCommitWriteStreams = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchCommitWriteStreams( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.batchCommitWriteStreams as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes batchCommitWriteStreams with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchCommitWriteStreams(request), + expectedError + ); + assert( + (client.innerApiCalls.batchCommitWriteStreams as 
SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('flushRows', () => { + it('invokes flushRows without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + request.writeStream = ''; + const expectedHeaderRequestParams = 'write_stream='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + ); + client.innerApiCalls.flushRows = stubSimpleCall(expectedResponse); + const [response] = await client.flushRows(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.flushRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + + it('invokes flushRows without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + request.writeStream = ''; + const expectedHeaderRequestParams = 'write_stream='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + ); + client.innerApiCalls.flushRows = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.flushRows( + request, + ( + err?: Error | null, + 
result?: protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.flushRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); + + it('invokes flushRows with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + request.writeStream = ''; + const expectedHeaderRequestParams = 'write_stream='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.flushRows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.flushRows(request), expectedError); + assert( + (client.innerApiCalls.flushRows as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('appendRows', () => { + it('invokes appendRows without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + ); + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.AppendRowsResponse() + ); + client.innerApiCalls.appendRows = stubBidiStreamingCall(expectedResponse); + const stream = client.appendRows(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: 
protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.appendRows as SinonStub) + .getCall(0) + .calledWithExactly(undefined) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + + it('invokes appendRows with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + ); + request.writeStream = ''; + const expectedHeaderRequestParams = 'write_stream='; + const expectedError = new Error('expected'); + client.innerApiCalls.appendRows = stubBidiStreamingCall( + undefined, + expectedError + ); + const stream = client.appendRows(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + await assert.rejects(promise, expectedError); + assert( + (client.innerApiCalls.appendRows as SinonStub) + .getCall(0) + .calledWithExactly(undefined) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + }); + + describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + 
.calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readStream', () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 
'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('table', () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( 
+ (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('writeStream', () => { + const fakePath = '/rendered/path/writeStream'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + stream: 'streamValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.writeStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.writeStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('writeStreamPath', () => { + const result = client.writeStreamPath( + 'projectValue', + 'datasetValue', + 'tableValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromWriteStreamName', () => { + const result = client.matchProjectFromWriteStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromWriteStreamName', () => { + const result = client.matchDatasetFromWriteStreamName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromWriteStreamName', () => { + const result = 
client.matchTableFromWriteStreamName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromWriteStreamName', () => { + const result = client.matchStreamFromWriteStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); From 21b3c0e0e13b1eff78e95f2ccbfe7b0ed18bbdb8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Sep 2021 15:36:16 +0000 Subject: [PATCH 147/333] chore: release 2.7.0 (#214) :robot: I have created a release \*beep\* \*boop\* --- ## [2.7.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.2...v2.7.0) (2021-09-27) ### Features * add BigQuery Storage Write API v1 ([#209](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/209)) ([e0401d9](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e0401d96480cd192a2fad8075884d2a8abd417ca)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 77c6ab23ab6..8d10cad8bf0 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.7.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.2...v2.7.0) (2021-09-27) + + +### Features + +* add BigQuery Storage Write API v1 ([#209](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/209)) ([e0401d9](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e0401d96480cd192a2fad8075884d2a8abd417ca)) + ### [2.6.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.1...v2.6.2) (2021-09-07) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a5bd884b166..ea671823398 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.6.2", + "version": "2.7.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From cee75795a79b9f88cdb0a1161ccfd5966ba49b53 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 19:56:29 +0000 Subject: [PATCH 148/333] build(node): run linkinator against index.html (#1227) (#218) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 73bbf7d3210..d0ca942b269 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ 
b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:111973c0da7608bf1e60d070e5449d48826c385a6b92a56cb9203f1725d33c3d + digest: sha256:c0ad7c54b9210f1d10678955bc37b377e538e15cb07ecc3bac93cc7219ec2bc5 From a1b7a5d26288b6a58cff6fc97feb17d2ff287061 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Oct 2021 00:44:58 +0000 Subject: [PATCH 149/333] build(node): update deps used during postprocessing (#1243) (#219) --- .../bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/protos/protos.d.ts | 3 ++- handwritten/bigquery-storage/protos/protos.js | 7 +++++++ handwritten/bigquery-storage/protos/protos.json | 15 ++++++++++++++- 4 files changed, 24 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index d0ca942b269..73ad800accf 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-nodejs:latest - digest: sha256:c0ad7c54b9210f1d10678955bc37b377e538e15cb07ecc3bac93cc7219ec2bc5 + digest: sha256:bbb8dd6576ac58830a07fc17e9511ae898be44f2219d3344449b125df9854441 diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 7ec3286e4e5..ba247175654 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -10601,7 +10601,8 @@ export namespace google { OUTPUT_ONLY = 3, INPUT_ONLY = 4, IMMUTABLE = 5, - UNORDERED_LIST = 6 + UNORDERED_LIST = 6, + NON_EMPTY_DEFAULT = 7 } /** Properties of a ResourceDescriptor. 
*/ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 5e9a6239f21..e74a907a0bf 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -19766,6 +19766,7 @@ case 4: case 5: case 6: + case 7: break; } } @@ -19870,6 +19871,10 @@ case 6: message[".google.api.fieldBehavior"][i] = 6; break; + case "NON_EMPTY_DEFAULT": + case 7: + message[".google.api.fieldBehavior"][i] = 7; + break; } } if (object[".google.api.resourceReference"] != null) { @@ -26178,6 +26183,7 @@ * @property {number} INPUT_ONLY=4 INPUT_ONLY value * @property {number} IMMUTABLE=5 IMMUTABLE value * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value + * @property {number} NON_EMPTY_DEFAULT=7 NON_EMPTY_DEFAULT value */ api.FieldBehavior = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -26188,6 +26194,7 @@ values[valuesById[4] = "INPUT_ONLY"] = 4; values[valuesById[5] = "IMMUTABLE"] = 5; values[valuesById[6] = "UNORDERED_LIST"] = 6; + values[valuesById[7] = "NON_EMPTY_DEFAULT"] = 7; return values; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 7070197fe3b..4feebdb2d72 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1988,6 +1988,18 @@ ] ], "reserved": [ + [ + 4, + 4 + ], + [ + 5, + 5 + ], + [ + 6, + 6 + ], [ 8, 8 @@ -2550,7 +2562,8 @@ "OUTPUT_ONLY": 3, "INPUT_ONLY": 4, "IMMUTABLE": 5, - "UNORDERED_LIST": 6 + "UNORDERED_LIST": 6, + "NON_EMPTY_DEFAULT": 7 } }, "resourceReference": { From 45ea1d9671cab2dcfb56e572bdba54db5d81301e Mon Sep 17 00:00:00 2001 From: "F. 
Hinkelmann" Date: Thu, 21 Oct 2021 11:30:26 -0400 Subject: [PATCH 150/333] chore(cloud-rad): delete api-extractor config (#220) --- .../bigquery-storage/api-extractor.json | 369 ------------------ 1 file changed, 369 deletions(-) delete mode 100644 handwritten/bigquery-storage/api-extractor.json diff --git a/handwritten/bigquery-storage/api-extractor.json b/handwritten/bigquery-storage/api-extractor.json deleted file mode 100644 index de228294b23..00000000000 --- a/handwritten/bigquery-storage/api-extractor.json +++ /dev/null @@ -1,369 +0,0 @@ -/** - * Config file for API Extractor. For more info, please visit: https://api-extractor.com - */ -{ - "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - - /** - * Optionally specifies another JSON config file that this file extends from. This provides a way for - * standard settings to be shared across multiple projects. - * - * If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains - * the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be - * resolved using NodeJS require(). - * - * SUPPORTED TOKENS: none - * DEFAULT VALUE: "" - */ - // "extends": "./shared/api-extractor-base.json" - // "extends": "my-package/include/api-extractor-base.json" - - /** - * Determines the "" token that can be used with other config file settings. The project folder - * typically contains the tsconfig.json and package.json config files, but the path is user-defined. - * - * The path is resolved relative to the folder of the config file that contains the setting. - * - * The default value for "projectFolder" is the token "", which means the folder is determined by traversing - * parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder - * that contains a tsconfig.json file. 
If a tsconfig.json file cannot be found in this way, then an error - * will be reported. - * - * SUPPORTED TOKENS: - * DEFAULT VALUE: "" - */ - // "projectFolder": "..", - - /** - * (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor - * analyzes the symbols exported by this module. - * - * The file extension must be ".d.ts" and not ".ts". - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - */ - "mainEntryPointFilePath": "/protos/protos.d.ts", - - /** - * A list of NPM package names whose exports should be treated as part of this package. - * - * For example, suppose that Webpack is used to generate a distributed bundle for the project "library1", - * and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part - * of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly - * imports library2. To avoid this, we can specify: - * - * "bundledPackages": [ "library2" ], - * - * This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been - * local files for library1. - */ - "bundledPackages": [ ], - - /** - * Determines how the TypeScript compiler engine will be invoked by API Extractor. - */ - "compiler": { - /** - * Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project. - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * Note: This setting will be ignored if "overrideTsconfig" is used. 
- * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "/tsconfig.json" - */ - // "tsconfigFilePath": "/tsconfig.json", - - /** - * Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk. - * The object must conform to the TypeScript tsconfig schema: - * - * http://json.schemastore.org/tsconfig - * - * If omitted, then the tsconfig.json file will be read from the "projectFolder". - * - * DEFAULT VALUE: no overrideTsconfig section - */ - // "overrideTsconfig": { - // . . . - // } - - /** - * This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended - * and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when - * dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses - * for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck. - * - * DEFAULT VALUE: false - */ - // "skipLibCheck": true, - }, - - /** - * Configures how the API report file (*.api.md) will be generated. - */ - "apiReport": { - /** - * (REQUIRED) Whether to generate an API report. - */ - "enabled": true, - - /** - * The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce - * a full file path. - * - * The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/". - * - * SUPPORTED TOKENS: , - * DEFAULT VALUE: ".api.md" - */ - // "reportFileName": ".api.md", - - /** - * Specifies the folder where the API report file is written. The file name portion is determined by - * the "reportFileName" setting. - * - * The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy, - * e.g. for an API review. 
- * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "/etc/" - */ - // "reportFolder": "/etc/", - - /** - * Specifies the folder where the temporary report file is written. The file name portion is determined by - * the "reportFileName" setting. - * - * After the temporary file is written to disk, it is compared with the file in the "reportFolder". - * If they are different, a production build will fail. - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "/temp/" - */ - // "reportTempFolder": "/temp/" - }, - - /** - * Configures how the doc model file (*.api.json) will be generated. - */ - "docModel": { - /** - * (REQUIRED) Whether to generate a doc model file. - */ - "enabled": true, - - /** - * The output path for the doc model file. The file extension should be ".api.json". - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "/temp/.api.json" - */ - // "apiJsonFilePath": "/temp/.api.json" - }, - - /** - * Configures how the .d.ts rollup file will be generated. - */ - "dtsRollup": { - /** - * (REQUIRED) Whether to generate the .d.ts rollup file. - */ - "enabled": true, - - /** - * Specifies the output path for a .d.ts rollup file to be generated without any trimming. - * This file will include all declarations that are exported by the main entry point. - * - * If the path is an empty string, then this file will not be written. - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". 
- * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "/dist/.d.ts" - */ - // "untrimmedFilePath": "/dist/.d.ts", - - /** - * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release. - * This file will include only declarations that are marked as "@public" or "@beta". - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "" - */ - // "betaTrimmedFilePath": "/dist/-beta.d.ts", - - - /** - * Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release. - * This file will include only declarations that are marked as "@public". - * - * If the path is an empty string, then this file will not be written. - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "" - */ - // "publicTrimmedFilePath": "/dist/-public.d.ts", - - /** - * When a declaration is trimmed, by default it will be replaced by a code comment such as - * "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the - * declaration completely. - * - * DEFAULT VALUE: false - */ - // "omitTrimmingComments": true - }, - - /** - * Configures how the tsdoc-metadata.json file will be generated. - */ - "tsdocMetadata": { - /** - * Whether to generate the tsdoc-metadata.json file. - * - * DEFAULT VALUE: true - */ - // "enabled": true, - - /** - * Specifies where the TSDoc metadata file should be written. - * - * The path is resolved relative to the folder of the config file that contains the setting; to change this, - * prepend a folder token such as "". 
- * - * The default value is "", which causes the path to be automatically inferred from the "tsdocMetadata", - * "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup - * falls back to "tsdoc-metadata.json" in the package folder. - * - * SUPPORTED TOKENS: , , - * DEFAULT VALUE: "" - */ - // "tsdocMetadataFilePath": "/dist/tsdoc-metadata.json" - }, - - /** - * Specifies what type of newlines API Extractor should use when writing output files. By default, the output files - * will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead. - * To use the OS's default newline kind, specify "os". - * - * DEFAULT VALUE: "crlf" - */ - // "newlineKind": "crlf", - - /** - * Configures how API Extractor reports error and warning messages produced during analysis. - * - * There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages. - */ - "messages": { - /** - * Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing - * the input .d.ts files. - * - * TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551" - * - * DEFAULT VALUE: A single "default" entry with logLevel=warning. - */ - "compilerMessageReporting": { - /** - * Configures the default routing for messages that don't match an explicit rule in this table. - */ - "default": { - /** - * Specifies whether the message should be written to the the tool's output log. Note that - * the "addToApiReportFile" property may supersede this option. - * - * Possible values: "error", "warning", "none" - * - * Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail - * and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes - * the "--local" option), the warning is displayed but the build will not fail. 
- * - * DEFAULT VALUE: "warning" - */ - "logLevel": "warning", - - /** - * When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md), - * then the message will be written inside that file; otherwise, the message is instead logged according to - * the "logLevel" option. - * - * DEFAULT VALUE: false - */ - // "addToApiReportFile": false - }, - - // "TS2551": { - // "logLevel": "warning", - // "addToApiReportFile": true - // }, - // - // . . . - }, - - /** - * Configures handling of messages reported by API Extractor during its analysis. - * - * API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag" - * - * DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings - */ - "extractorMessageReporting": { - "default": { - "logLevel": "warning", - // "addToApiReportFile": false - }, - - // "ae-extra-release-tag": { - // "logLevel": "warning", - // "addToApiReportFile": true - // }, - // - // . . . - }, - - /** - * Configures handling of messages reported by the TSDoc parser when analyzing code comments. - * - * TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text" - * - * DEFAULT VALUE: A single "default" entry with logLevel=warning. - */ - "tsdocMessageReporting": { - "default": { - "logLevel": "warning", - // "addToApiReportFile": false - } - - // "tsdoc-link-tag-unescaped-text": { - // "logLevel": "warning", - // "addToApiReportFile": true - // }, - // - // . . . 
- } - } - -} From 4388d3adf1b959a5fa695fe7483b7b88f90481b8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 22 Oct 2021 11:00:28 -0700 Subject: [PATCH 151/333] docs(samples): add auto-generated samples for Node with api short name in region tag (#216) PiperOrigin-RevId: 399287285 Source-Link: https://github.com/googleapis/googleapis/commit/15759865d1c54e3d46429010f7e472fe6c3d3715 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b27fff623a5d8d586b703b5e4919856abe7c2eb3 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjI3ZmZmNjIzYTVkOGQ1ODZiNzAzYjVlNDkxOTg1NmFiZTdjMmViMyJ9 --- .../.github/.OwlBot.lock.yaml | 2 +- .../v1/big_query_read.create_read_session.js | 68 +++++++++++++++ .../generated/v1/big_query_read.read_rows.js | 66 ++++++++++++++ .../v1/big_query_read.split_read_stream.js | 62 +++++++++++++ .../v1/big_query_write.append_rows.js | 85 ++++++++++++++++++ ..._query_write.batch_commit_write_streams.js | 58 +++++++++++++ .../v1/big_query_write.create_write_stream.js | 58 +++++++++++++ .../big_query_write.finalize_write_stream.js | 53 ++++++++++++ .../v1/big_query_write.flush_rows.js | 57 ++++++++++++ .../v1/big_query_write.get_write_stream.js | 53 ++++++++++++ ...orage.batch_create_read_session_streams.js | 61 +++++++++++++ .../big_query_storage.create_read_session.js | 86 +++++++++++++++++++ .../big_query_storage.finalize_stream.js | 53 ++++++++++++ .../v1beta1/big_query_storage.read_rows.js | 63 ++++++++++++++ .../big_query_storage.split_read_stream.js | 63 ++++++++++++++ handwritten/bigquery-storage/src/index.ts | 12 ++- .../src/v1/big_query_read_client_config.json | 3 +- .../system-test/fixtures/sample/src/index.js | 2 +- .../system-test/fixtures/sample/src/index.ts | 8 +- 19 files changed, 904 insertions(+), 9 deletions(-) create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js create mode 100644 
handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 73ad800accf..8d0a479d477 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest digest: sha256:bbb8dd6576ac58830a07fc17e9511ae898be44f2219d3344449b125df9854441 diff --git 
a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js new file mode 100644 index 00000000000..2469979b8cd --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -0,0 +1,68 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(parent, readSession) { + // [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The request project that owns the session, in the form of + * `projects/{project_id}`. + */ + // const parent = 'abc123' + /** + * Required. Session to be created. + */ + // const readSession = '' + /** + * Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table. Error + * will be returned if the max count is greater than the current system + * max limit of 1,000. + * Streams must be read starting from offset 0. 
+ */ + // const maxStreamCount = 1234 + + // Imports the Storage library + const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryReadClient(); + + async function createReadSession() { + // Construct request + const request = { + parent, + readSession, + }; + + // Run request + const response = await storageClient.createReadSession(request); + console.log(response); + } + + createReadSession(); + // [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js new file mode 100644 index 00000000000..8b9879e020c --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -0,0 +1,66 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(readStream) { + // [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Stream to read rows from. + */ + // const readStream = 'abc123' + /** + * The offset requested must be less than the last row read from Read. 
+ * Requesting a larger offset is undefined. If not specified, start reading + * from offset zero. + */ + // const offset = 1234 + + // Imports the Storage library + const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryReadClient(); + + async function readRows() { + // Construct request + const request = { + readStream, + }; + + // Run request + const stream = await storageClient.readRows(request); + stream.on('data', response => { + console.log(response); + }); + stream.on('error', err => { + throw err; + }); + stream.on('end', () => { + /* API call completed */ + }); + } + + readRows(); + // [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js new file mode 100644 index 00000000000..1a13b9bca0f --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(name) { + // [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Name of the stream to split. + */ + // const name = 'abc123' + /** + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to a data storage boundary on the server side. + */ + // const fraction = 1234 + + // Imports the Storage library + const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryReadClient(); + + async function splitReadStream() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await storageClient.splitReadStream(request); + console.log(response); + } + + splitReadStream(); + // [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js new file mode 100644 index 00000000000..4e61d52dd25 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -0,0 +1,85 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(writeStream) { + // [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The write_stream identifies the target of the append operation, and only + * needs to be specified as part of the first request on the gRPC connection. + * If provided for subsequent requests, it must match the value of the first + * request. + * For explicitly created write streams, the format is: + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` + * For the special default stream, the format is: + * `projects/{project}/datasets/{dataset}/tables/{table}/_default`. + */ + // const writeStream = 'abc123' + /** + * If present, the write is only performed if the next append offset is same + * as the provided value. If not present, the write is performed at the + * current end of stream. Specifying a value for this field is not allowed + * when calling AppendRows for the '_default' stream. + */ + // const offset = '' + /** + * Rows in proto format. + */ + // const protoRows = '' + /** + * Id set by client to annotate its identity. Only initial request setting is + * respected. 
+ */ + // const traceId = 'abc123' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function appendRows() { + // Construct request + const request = { + writeStream, + }; + + // Run request + const stream = await storageClient.appendRows(); + stream.on('data', response => { + console.log(response); + }); + stream.on('error', err => { + throw err; + }); + stream.on('end', () => { + /* API call completed */ + }); + stream.write(request); + stream.end(); + } + + appendRows(); + // [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js new file mode 100644 index 00000000000..5fddaaf9dd1 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -0,0 +1,58 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(parent, writeStreams) { + // [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Parent table that all the streams should belong to, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}`. + */ + // const parent = 'abc123' + /** + * Required. The group of streams that will be committed atomically. + */ + // const writeStreams = 'abc123' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function batchCommitWriteStreams() { + // Construct request + const request = { + parent, + writeStreams, + }; + + // Run request + const response = await storageClient.batchCommitWriteStreams(request); + console.log(response); + } + + batchCommitWriteStreams(); + // [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js new file mode 100644 index 00000000000..6feec1b5738 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -0,0 +1,58 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(parent, writeStream) { + // [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + */ + // const parent = 'abc123' + /** + * Required. Stream to be created. + */ + // const writeStream = '' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function createWriteStream() { + // Construct request + const request = { + parent, + writeStream, + }; + + // Run request + const response = await storageClient.createWriteStream(request); + console.log(response); + } + + createWriteStream(); + // [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js new file mode 100644 index 00000000000..c47a75441e5 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -0,0 +1,53 @@ +// Copyright 2021 Google LLC +// +// Licensed under 
the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(name) { + // [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Name of the stream to finalize, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + */ + // const name = 'abc123' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function finalizeWriteStream() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await storageClient.finalizeWriteStream(request); + console.log(response); + } + + finalizeWriteStream(); + // [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js new file mode 100644 index 00000000000..c5febb0f90e --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -0,0 +1,57 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache 
License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(writeStream) { + // [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The stream that is the target of the flush operation. + */ + // const writeStream = 'abc123' + /** + * Ending offset of the flush operation. Rows before this offset(including + * this offset) will be flushed. + */ + // const offset = '' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function flushRows() { + // Construct request + const request = { + writeStream, + }; + + // Run request + const response = await storageClient.flushRows(request); + console.log(response); + } + + flushRows(); + // [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js new file mode 100644 index 00000000000..fef061c6387 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -0,0 +1,53 @@ +// Copyright 2021 
Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(name) { + // [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Name of the stream to get, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + */ + // const name = 'abc123' + + // Imports the Storage library + const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; + + // Instantiates a client + const storageClient = new BigQueryWriteClient(); + + async function getWriteStream() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await storageClient.getWriteStream(request); + console.log(response); + } + + getWriteStream(); + // [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js new file mode 100644 index 00000000000..b7aec1c2b12 --- /dev/null +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -0,0 +1,61 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(session, requestedStreams) { + // [START bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Must be a non-expired session obtained from a call to + * CreateReadSession. Only the name field needs to be set. + */ + // const session = '' + /** + * Required. Number of new streams requested. Must be positive. + * Number of added streams may be less than this, see CreateReadSessionRequest + * for more information. 
+ */ + // const requestedStreams = 1234 + + // Imports the Storage library + const {BigQueryStorageClient} = + require('@google-cloud/bigquery-storage').v1beta1; + + // Instantiates a client + const storageClient = new BigQueryStorageClient(); + + async function batchCreateReadSessionStreams() { + // Construct request + const request = { + session, + requestedStreams, + }; + + // Run request + const response = await storageClient.batchCreateReadSessionStreams(request); + console.log(response); + } + + batchCreateReadSessionStreams(); + // [END bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js new file mode 100644 index 00000000000..a133bd1defc --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -0,0 +1,86 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(tableReference, parent) { + // [START bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */ + /** + * Required. Reference to the table to read. + */ + // const tableReference = '' + /** + * Required. String of the form `projects/{project_id}` indicating the + * project this ReadSession is associated with. This is the project that will + * be billed for usage. + */ + // const parent = 'abc123' + /** + * Any modifiers to the Table (e.g. snapshot timestamp). + */ + // const tableModifiers = '' + /** + * Initial number of streams. If unset or 0, we will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table and + * the maximum amount of parallelism allowed by the system. + * Streams must be read starting from offset 0. + */ + // const requestedStreams = 1234 + /** + * Read options for this session (e.g. column selection, filters). + */ + // const readOptions = '' + /** + * Data output format. Currently default to Avro. + */ + // const format = '' + /** + * The strategy to use for distributing data among multiple streams. Currently + * defaults to liquid sharding. 
+ */ + // const shardingStrategy = '' + + // Imports the Storage library + const {BigQueryStorageClient} = + require('@google-cloud/bigquery-storage').v1beta1; + + // Instantiates a client + const storageClient = new BigQueryStorageClient(); + + async function createReadSession() { + // Construct request + const request = { + tableReference, + parent, + }; + + // Run request + const response = await storageClient.createReadSession(request); + console.log(response); + } + + createReadSession(); + // [END bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js new file mode 100644 index 00000000000..ce23e01ed4a --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -0,0 +1,53 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(stream) { + // [START bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Stream to finalize. 
+ */ + // const stream = '' + + // Imports the Storage library + const {BigQueryStorageClient} = + require('@google-cloud/bigquery-storage').v1beta1; + + // Instantiates a client + const storageClient = new BigQueryStorageClient(); + + async function finalizeStream() { + // Construct request + const request = { + stream, + }; + + // Run request + const response = await storageClient.finalizeStream(request); + console.log(response); + } + + finalizeStream(); + // [END bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js new file mode 100644 index 00000000000..029b2038077 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -0,0 +1,63 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +function main(readPosition) { + // [START bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Identifier of the position in the stream to start reading from. 
+ * The offset requested must be less than the last row read from ReadRows. + * Requesting a larger offset is undefined. + */ + // const readPosition = '' + + // Imports the Storage library + const {BigQueryStorageClient} = + require('@google-cloud/bigquery-storage').v1beta1; + + // Instantiates a client + const storageClient = new BigQueryStorageClient(); + + async function readRows() { + // Construct request + const request = { + readPosition, + }; + + // Run request + const stream = await storageClient.readRows(request); + stream.on('data', response => { + console.log(response); + }); + stream.on('error', err => { + throw err; + }); + stream.on('end', () => { + /* API call completed */ + }); + } + + readRows(); + // [END bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js new file mode 100644 index 00000000000..87b7bcc17e0 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -0,0 +1,63 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +function main(originalStream) { + // [START bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Stream to split. + */ + // const originalStream = '' + /** + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to to a data storage boundary on the server side. + */ + // const fraction = 1234 + + // Imports the Storage library + const {BigQueryStorageClient} = + require('@google-cloud/bigquery-storage').v1beta1; + + // Instantiates a client + const storageClient = new BigQueryStorageClient(); + + async function splitReadStream() { + // Construct request + const request = { + originalStream, + }; + + // Run request + const response = await storageClient.splitReadStream(request); + console.log(response); + } + + splitReadStream(); + // [END bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 3c75be7738d..8d6b763438d 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -20,11 +20,19 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; const BigQueryReadClient = v1.BigQueryReadClient; type BigQueryReadClient = v1.BigQueryReadClient; +const BigQueryWriteClient = v1.BigQueryWriteClient; 
+type BigQueryWriteClient = v1.BigQueryWriteClient; const BigQueryStorageClient = v1beta1.BigQueryStorageClient; type BigQueryStorageClient = v1beta1.BigQueryStorageClient; -export {v1, BigQueryReadClient, v1beta1, BigQueryStorageClient}; +export { + v1, + BigQueryReadClient, + v1beta1, + BigQueryStorageClient, + BigQueryWriteClient, +}; // For compatibility with JavaScript libraries we need to provide this default export: // tslint:disable-next-line no-default-export -export default {v1, BigQueryReadClient}; +export default {v1, BigQueryReadClient, BigQueryWriteClient}; import * as protos from '../protos/protos'; export {protos}; diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json index f129939b0ef..42b2735b9fe 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json @@ -34,7 +34,8 @@ "retry_params_name": "default" }, "SplitReadStream": { - "retry_codes_name": "non_idempotent", + "timeout_millis": 600000, + "retry_codes_name": "idempotent", "retry_params_name": "default" } } diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 0e917c476bd..583e1053b26 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -21,7 +21,7 @@ const storage = require('@google-cloud/bigquery-storage'); function main() { const bigQueryReadClient = new storage.BigQueryReadClient(); - const bigQueryStorageClient = new storage.BigQueryStorageClient(); + const bigQueryWriteClient = new storage.BigQueryWriteClient(); } main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts 
index c2e01b6a3a2..a5aa9bb198f 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -18,14 +18,14 @@ import { BigQueryReadClient, - BigQueryStorageClient, + BigQueryWriteClient, } from '@google-cloud/bigquery-storage'; // check that the client class type name can be used function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { client.close(); } -function doStuffWithBigQueryStorageClient(client: BigQueryStorageClient) { +function doStuffWithBigQueryWriteClient(client: BigQueryWriteClient) { client.close(); } @@ -34,8 +34,8 @@ function main() { const bigQueryReadClient = new BigQueryReadClient(); doStuffWithBigQueryReadClient(bigQueryReadClient); // check that the client instance can be created - const bigQueryStorageClient = new BigQueryStorageClient(); - doStuffWithBigQueryStorageClient(bigQueryStorageClient); + const bigQueryWriteClient = new BigQueryWriteClient(); + doStuffWithBigQueryWriteClient(bigQueryWriteClient); } main(); From 22068bb9210d0ff4af6c8293086112ac6ab8aecc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 26 Oct 2021 23:20:36 +0200 Subject: [PATCH 152/333] chore(deps): update dependency @types/node to v16 (#221) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [@types/node](https://togithub.com/DefinitelyTyped/DefinitelyTyped) | [`^14.0.0` -> `^16.0.0`](https://renovatebot.com/diffs/npm/@types%2fnode/14.17.32/16.11.6) | [![age](https://badges.renovateapi.com/packages/npm/@types%2fnode/16.11.6/age-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![adoption](https://badges.renovateapi.com/packages/npm/@types%2fnode/16.11.6/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/@types%2fnode/16.11.6/compatibility-slim/14.17.32)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/@types%2fnode/16.11.6/confidence-slim/14.17.32)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: "after 9am and before 3pm" (UTC). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). 
--- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index ea671823398..1d074e7123d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -31,7 +31,7 @@ }, "devDependencies": { "@types/mocha": "^8.0.0", - "@types/node": "^14.0.0", + "@types/node": "^16.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", "gts": "^2.0.0", From d2c187e473030856bf3b059022a14cdaf4332f6a Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Wed, 27 Oct 2021 06:32:18 -0700 Subject: [PATCH 153/333] chore: fix the wrong post processor image (#223) --- handwritten/bigquery-storage/.github/.OwlBot.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.yaml b/handwritten/bigquery-storage/.github/.OwlBot.yaml index 52a82ebed51..2d27e09de99 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
docker: - image: gcr.io/repo-automation-bots/owlbot-nodejs:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest deep-preserve-regex: - /owl-bot-staging/v1alpha2 From 3d2d3cbe4e036b00ac0db4f60b7fcb60cc979180 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Nov 2021 20:50:12 +0100 Subject: [PATCH 154/333] chore(deps): update dependency sinon to v12 (#224) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [sinon](https://sinonjs.org/) ([source](https://togithub.com/sinonjs/sinon)) | [`^11.0.0` -> `^12.0.0`](https://renovatebot.com/diffs/npm/sinon/11.1.2/12.0.1) | [![age](https://badges.renovateapi.com/packages/npm/sinon/12.0.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/sinon/12.0.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/sinon/12.0.1/compatibility-slim/11.1.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/sinon/12.0.1/confidence-slim/11.1.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
sinonjs/sinon ### [`v12.0.1`](https://togithub.com/sinonjs/sinon/blob/master/CHANGES.md#​1201) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v12.0.0...v12.0.1) - [`3f598221`](https://togithub.com/sinonjs/sinon/commit/3f598221045904681f2b3b3ba1df617ed5e230e3) Fix issue with npm unlink for npm version > 6 (Carl-Erik Kopseng) > 'npm unlink' would implicitly unlink the current dir > until version 7, which requires an argument - [`51417a38`](https://togithub.com/sinonjs/sinon/commit/51417a38111eeeb7cd14338bfb762cc2df487e1b) Fix bundling of cjs module ([#​2412](https://togithub.com/sinonjs/sinon/issues/2412)) (Julian Grinblat) > - Fix bundling of cjs module > > - Run prettier *Released by [Carl-Erik Kopseng](https://togithub.com/fatso83) on 2021-11-04.* #### 12.0.0 ### [`v12.0.0`](https://togithub.com/sinonjs/sinon/compare/v11.1.2...v12.0.0) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v11.1.2...v12.0.0)
--- ### Configuration 📅 **Schedule**: "after 9am and before 3pm" (UTC). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 1d074e7123d..a890bee3780 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -42,7 +42,7 @@ "mocha": "^8.0.0", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^11.0.0", + "sinon": "^12.0.0", "ts-loader": "^9.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", From 66dca228f95093bb470cd9c8cb99c827a65102a4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 15:14:09 -0800 Subject: [PATCH 155/333] docs(samples): add example tags to generated samples (#225) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs(samples): add example tags to generated samples PiperOrigin-RevId: 408439482 Source-Link: https://github.com/googleapis/googleapis/commit/b9f61843dc80c7c285fc34fd3a40aae55082c2b9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/eb888bc214efc7bf43bf4634b470254565a659a5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZWI4ODhiYzIxNGVmYzdiZjQzYmY0NjM0YjQ3MDI1NDU2NWE2NTlhNSJ9 * 🦉 Updates from OwlBot See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../bigquery-storage/linkinator.config.json | 2 +- .../v1/big_query_read.create_read_session.js | 6 +- .../generated/v1/big_query_read.read_rows.js | 4 +- .../v1/big_query_read.split_read_stream.js | 4 +- .../v1/big_query_write.append_rows.js | 8 +- ..._query_write.batch_commit_write_streams.js | 4 +- .../v1/big_query_write.create_write_stream.js | 6 +- .../big_query_write.finalize_write_stream.js | 4 +- .../v1/big_query_write.flush_rows.js | 6 +- .../v1/big_query_write.get_write_stream.js | 4 +- ...orage.batch_create_read_session_streams.js | 6 +- .../big_query_storage.create_read_session.js | 14 +- .../big_query_storage.finalize_stream.js | 6 +- .../v1beta1/big_query_storage.read_rows.js | 6 +- .../big_query_storage.split_read_stream.js | 6 +- .../src/v1/big_query_read_client.ts | 150 +++++----- .../src/v1/big_query_write_client.ts | 236 ++++++++-------- .../src/v1beta1/big_query_storage_client.ts | 258 +++++++++--------- 18 files changed, 361 insertions(+), 369 deletions(-) diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json index 29a223b6db6..0121dfa684f 100644 --- a/handwritten/bigquery-storage/linkinator.config.json +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -6,5 +6,5 @@ "img.shields.io" ], "silent": true, - "concurrency": 10 + "concurrency": 5 } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index 2469979b8cd..6514bcca250 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -27,7 +27,7 @@ function main(parent, readSession) { /** * Required. Session to be created. 
*/ - // const readSession = '' + // const readSession = {} /** * Max initial number of streams. If unset or zero, the server will * provide a value of streams so as to produce reasonable throughput. Must be @@ -45,7 +45,7 @@ function main(parent, readSession) { // Instantiates a client const storageClient = new BigQueryReadClient(); - async function createReadSession() { + async function callCreateReadSession() { // Construct request const request = { parent, @@ -57,7 +57,7 @@ function main(parent, readSession) { console.log(response); } - createReadSession(); + callCreateReadSession(); // [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index 8b9879e020c..c853e64dd0f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -36,7 +36,7 @@ function main(readStream) { // Instantiates a client const storageClient = new BigQueryReadClient(); - async function readRows() { + async function callReadRows() { // Construct request const request = { readStream, @@ -55,7 +55,7 @@ function main(readStream) { }); } - readRows(); + callReadRows(); // [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index 1a13b9bca0f..12c8a99dbd9 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -40,7 +40,7 @@ function main(name) { // Instantiates a client const storageClient = new BigQueryReadClient(); - async function splitReadStream() { + 
async function callSplitReadStream() { // Construct request const request = { name, @@ -51,7 +51,7 @@ function main(name) { console.log(response); } - splitReadStream(); + callSplitReadStream(); // [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 4e61d52dd25..f48ff733f78 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -36,11 +36,11 @@ function main(writeStream) { * current end of stream. Specifying a value for this field is not allowed * when calling AppendRows for the '_default' stream. */ - // const offset = '' + // const offset = {} /** * Rows in proto format. */ - // const protoRows = '' + // const protoRows = {} /** * Id set by client to annotate its identity. Only initial request setting is * respected. 
@@ -53,7 +53,7 @@ function main(writeStream) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function appendRows() { + async function callAppendRows() { // Construct request const request = { writeStream, @@ -74,7 +74,7 @@ function main(writeStream) { stream.end(); } - appendRows(); + callAppendRows(); // [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 5fddaaf9dd1..9c290564f7d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -35,7 +35,7 @@ function main(parent, writeStreams) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function batchCommitWriteStreams() { + async function callBatchCommitWriteStreams() { // Construct request const request = { parent, @@ -47,7 +47,7 @@ function main(parent, writeStreams) { console.log(response); } - batchCommitWriteStreams(); + callBatchCommitWriteStreams(); // [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index 6feec1b5738..5e035e2afef 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -27,7 +27,7 @@ function main(parent, writeStream) { /** * Required. Stream to be created. 
*/ - // const writeStream = '' + // const writeStream = {} // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; @@ -35,7 +35,7 @@ function main(parent, writeStream) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function createWriteStream() { + async function callCreateWriteStream() { // Construct request const request = { parent, @@ -47,7 +47,7 @@ function main(parent, writeStream) { console.log(response); } - createWriteStream(); + callCreateWriteStream(); // [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index c47a75441e5..bca7849f860 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -31,7 +31,7 @@ function main(name) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function finalizeWriteStream() { + async function callFinalizeWriteStream() { // Construct request const request = { name, @@ -42,7 +42,7 @@ function main(name) { console.log(response); } - finalizeWriteStream(); + callFinalizeWriteStream(); // [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index c5febb0f90e..e2280592c71 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -27,7 +27,7 @@ function main(writeStream) { * Ending offset of the flush operation. 
Rows before this offset(including * this offset) will be flushed. */ - // const offset = '' + // const offset = {} // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; @@ -35,7 +35,7 @@ function main(writeStream) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function flushRows() { + async function callFlushRows() { // Construct request const request = { writeStream, @@ -46,7 +46,7 @@ function main(writeStream) { console.log(response); } - flushRows(); + callFlushRows(); // [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index fef061c6387..4309dd091ea 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -31,7 +31,7 @@ function main(name) { // Instantiates a client const storageClient = new BigQueryWriteClient(); - async function getWriteStream() { + async function callGetWriteStream() { // Construct request const request = { name, @@ -42,7 +42,7 @@ function main(name) { console.log(response); } - getWriteStream(); + callGetWriteStream(); // [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index b7aec1c2b12..8a669bec643 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -23,7 +23,7 @@ function 
main(session, requestedStreams) { * Required. Must be a non-expired session obtained from a call to * CreateReadSession. Only the name field needs to be set. */ - // const session = '' + // const session = {} /** * Required. Number of new streams requested. Must be positive. * Number of added streams may be less than this, see CreateReadSessionRequest @@ -38,7 +38,7 @@ function main(session, requestedStreams) { // Instantiates a client const storageClient = new BigQueryStorageClient(); - async function batchCreateReadSessionStreams() { + async function callBatchCreateReadSessionStreams() { // Construct request const request = { session, @@ -50,7 +50,7 @@ function main(session, requestedStreams) { console.log(response); } - batchCreateReadSessionStreams(); + callBatchCreateReadSessionStreams(); // [END bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index a133bd1defc..11589c89dbb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -22,7 +22,7 @@ function main(tableReference, parent) { /** * Required. Reference to the table to read. */ - // const tableReference = '' + // const tableReference = {} /** * Required. String of the form `projects/{project_id}` indicating the * project this ReadSession is associated with. This is the project that will @@ -32,7 +32,7 @@ function main(tableReference, parent) { /** * Any modifiers to the Table (e.g. snapshot timestamp). */ - // const tableModifiers = '' + // const tableModifiers = {} /** * Initial number of streams. If unset or 0, we will * provide a value of streams so as to produce reasonable throughput. 
Must be @@ -45,16 +45,16 @@ function main(tableReference, parent) { /** * Read options for this session (e.g. column selection, filters). */ - // const readOptions = '' + // const readOptions = {} /** * Data output format. Currently default to Avro. */ - // const format = '' + // const format = {} /** * The strategy to use for distributing data among multiple streams. Currently * defaults to liquid sharding. */ - // const shardingStrategy = '' + // const shardingStrategy = {} // Imports the Storage library const {BigQueryStorageClient} = @@ -63,7 +63,7 @@ function main(tableReference, parent) { // Instantiates a client const storageClient = new BigQueryStorageClient(); - async function createReadSession() { + async function callCreateReadSession() { // Construct request const request = { tableReference, @@ -75,7 +75,7 @@ function main(tableReference, parent) { console.log(response); } - createReadSession(); + callCreateReadSession(); // [END bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index ce23e01ed4a..fc71cd3eb7f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -22,7 +22,7 @@ function main(stream) { /** * Required. Stream to finalize. 
*/ - // const stream = '' + // const stream = {} // Imports the Storage library const {BigQueryStorageClient} = @@ -31,7 +31,7 @@ function main(stream) { // Instantiates a client const storageClient = new BigQueryStorageClient(); - async function finalizeStream() { + async function callFinalizeStream() { // Construct request const request = { stream, @@ -42,7 +42,7 @@ function main(stream) { console.log(response); } - finalizeStream(); + callFinalizeStream(); // [END bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index 029b2038077..c80e30bed1e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -24,7 +24,7 @@ function main(readPosition) { * The offset requested must be less than the last row read from ReadRows. * Requesting a larger offset is undefined. 
*/ - // const readPosition = '' + // const readPosition = {} // Imports the Storage library const {BigQueryStorageClient} = @@ -33,7 +33,7 @@ function main(readPosition) { // Instantiates a client const storageClient = new BigQueryStorageClient(); - async function readRows() { + async function callReadRows() { // Construct request const request = { readPosition, @@ -52,7 +52,7 @@ function main(readPosition) { }); } - readRows(); + callReadRows(); // [END bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async] } diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index 87b7bcc17e0..f86ebf52e46 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -22,7 +22,7 @@ function main(originalStream) { /** * Required. Stream to split. */ - // const originalStream = '' + // const originalStream = {} /** * A value in the range (0.0, 1.0) that specifies the fractional point at * which the original stream should be split. 
The actual split point is @@ -41,7 +41,7 @@ function main(originalStream) { // Instantiates a client const storageClient = new BigQueryStorageClient(); - async function splitReadStream() { + async function callSplitReadStream() { // Construct request const request = { originalStream, @@ -52,7 +52,7 @@ function main(originalStream) { console.log(response); } - splitReadStream(); + callSplitReadStream(); // [END bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async] } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 10f28e88015..6c822fe16e2 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -321,40 +321,6 @@ export class BigQueryReadClient { // ------------------- // -- Service calls -- // ------------------- - createReadSession( - request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - >; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; /** * Creates a new read session. 
A read session divides the contents of a * BigQuery table into one or more streams, which can then be used to read @@ -399,9 +365,43 @@ export class BigQueryReadClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. - * @example - * const [response] = await client.createReadSession(request); + * @example include:samples/generated/v1/big_query_read.create_read_session.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async */ + createReadSession( + request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; createReadSession( request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, optionsOrCallback?: @@ -448,40 +448,6 @@ export class BigQueryReadClient { this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); } - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options?: CallOptions - ): Promise< - [ - 
protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - >; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; /** * Splits a given `ReadStream` into two `ReadStream` objects. These * `ReadStream` objects are referred to as the primary and the residual @@ -515,9 +481,43 @@ export class BigQueryReadClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
- * @example - * const [response] = await client.splitReadStream(request); + * @example include:samples/generated/v1/big_query_read.split_read_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async */ + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; splitReadStream( request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, optionsOrCallback?: @@ -589,10 +589,8 @@ export class BigQueryReadClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) * for more details and examples. - * @example - * const stream = client.readRows(request); - * stream.on('data', (response) => { ... }); - * stream.on('end', () => { ... 
}); + * @example include:samples/generated/v1/big_query_read.read_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_ReadRows_async */ readRows( request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 62973bfa20a..c87b53c253d 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -327,6 +327,31 @@ export class BigQueryWriteClient { // ------------------- // -- Service calls -- // ------------------- + /** + * Creates a write stream to the given table. + * Additionally, every table has a special stream named '_default' + * to which data can be written. This stream doesn't need to be created using + * CreateWriteStream. It is a stream that can be used simultaneously by any + * number of clients. Data written to this stream is considered committed as + * soon as an acknowledgement is received. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {google.cloud.bigquery.storage.v1.WriteStream} request.writeStream + * Required. Stream to be created. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.create_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async + */ createWriteStream( request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, options?: CallOptions @@ -361,31 +386,6 @@ export class BigQueryWriteClient { {} | null | undefined > ): void; - /** - * Creates a write stream to the given table. - * Additionally, every table has a special stream named '_default' - * to which data can be written. This stream doesn't need to be created using - * CreateWriteStream. It is a stream that can be used simultaneously by any - * number of clients. Data written to this stream is considered committed as - * soon as an acknowledgement is received. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Reference to the table to which the stream belongs, in the format - * of `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param {google.cloud.bigquery.storage.v1.WriteStream} request.writeStream - * Required. Stream to be created. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example - * const [response] = await client.createWriteStream(request); - */ createWriteStream( request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, optionsOrCallback?: @@ -432,6 +432,24 @@ export class BigQueryWriteClient { this.initialize(); return this.innerApiCalls.createWriteStream(request, options, callback); } + /** + * Gets information about a write stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to get, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/big_query_write.get_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async + */ getWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, options?: CallOptions @@ -466,24 +484,6 @@ export class BigQueryWriteClient { {} | null | undefined > ): void; - /** - * Gets information about a write stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. Name of the stream to get, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example - * const [response] = await client.getWriteStream(request); - */ getWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, optionsOrCallback?: @@ -530,6 +530,25 @@ export class BigQueryWriteClient { this.initialize(); return this.innerApiCalls.getWriteStream(request, options, callback); } + /** + * Finalize a write stream so that no new data can be appended to the + * stream. Finalize is not supported on the '_default' stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to finalize, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.finalize_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async + */ finalizeWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, options?: CallOptions @@ -564,25 +583,6 @@ export class BigQueryWriteClient { {} | null | undefined > ): void; - /** - * Finalize a write stream so that no new data can be appended to the - * stream. Finalize is not supported on the '_default' stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. Name of the stream to finalize, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example - * const [response] = await client.finalizeWriteStream(request); - */ finalizeWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, optionsOrCallback?: @@ -629,6 +629,31 @@ export class BigQueryWriteClient { this.initialize(); return this.innerApiCalls.finalizeWriteStream(request, options, callback); } + /** + * Atomically commits a group of `PENDING` streams that belong to the same + * `parent` table. + * + * Streams must be finalized before commit and cannot be committed multiple + * times. Once a stream is committed, data in the stream becomes available + * for read operations. 
+ * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Parent table that all the streams should belong to, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {string[]} request.writeStreams + * Required. The group of streams that will be committed atomically. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/big_query_write.batch_commit_write_streams.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async + */ batchCommitWriteStreams( request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, options?: CallOptions @@ -663,31 +688,6 @@ export class BigQueryWriteClient { {} | null | undefined > ): void; - /** - * Atomically commits a group of `PENDING` streams that belong to the same - * `parent` table. - * - * Streams must be finalized before commit and cannot be committed multiple - * times. Once a stream is committed, data in the stream becomes available - * for read operations. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Parent table that all the streams should belong to, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param {string[]} request.writeStreams - * Required. The group of streams that will be committed atomically. 
- * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example - * const [response] = await client.batchCommitWriteStreams(request); - */ batchCommitWriteStreams( request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, optionsOrCallback?: @@ -738,6 +738,33 @@ export class BigQueryWriteClient { callback ); } + /** + * Flushes rows to a BUFFERED stream. + * + * If users are appending rows to BUFFERED stream, flush operation is + * required in order for the rows to become available for reading. A + * Flush operation flushes up to any previously flushed offset in a BUFFERED + * stream, to the offset specified in the request. + * + * Flush is not supported on the _default stream, since it is not BUFFERED. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.writeStream + * Required. The stream that is the target of the flush operation. + * @param {google.protobuf.Int64Value} request.offset + * Ending offset of the flush operation. Rows before this offset(including + * this offset) will be flushed. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. 
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/big_query_write.flush_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async + */ flushRows( request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, options?: CallOptions @@ -769,33 +796,6 @@ export class BigQueryWriteClient { {} | null | undefined > ): void; - /** - * Flushes rows to a BUFFERED stream. - * - * If users are appending rows to BUFFERED stream, flush operation is - * required in order for the rows to become available for reading. A - * Flush operation flushes up to any previously flushed offset in a BUFFERED - * stream, to the offset specified in the request. - * - * Flush is not supported on the _default stream, since it is not BUFFERED. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.writeStream - * Required. The stream that is the target of the flush operation. - * @param {google.protobuf.Int64Value} request.offset - * Ending offset of the flush operation. Rows before this offset(including - * this offset) will be flushed. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example - * const [response] = await client.flushRows(request); - */ flushRows( request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, optionsOrCallback?: @@ -882,12 +882,8 @@ export class BigQueryWriteClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) * for more details and examples. - * @example - * const stream = client.appendRows(); - * stream.on('data', (response) => { ... }); - * stream.on('end', () => { ... }); - * stream.write(request); - * stream.end(); + * @example include:samples/generated/v1/big_query_write.append_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async */ appendRows(options?: CallOptions): gax.CancellableStream { this.initialize(); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 0ddad79167e..400ba6c1265 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -318,40 +318,6 @@ export class BigQueryStorageClient { // ------------------- // -- Service calls -- // ------------------- - createReadSession( - request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - >; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request: 
protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; /** * Creates a new read session. A read session divides the contents of a * BigQuery table into one or more streams, which can then be used to read @@ -398,9 +364,43 @@ export class BigQueryStorageClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. - * @example - * const [response] = await client.createReadSession(request); + * @example include:samples/generated/v1beta1/big_query_storage.create_read_session.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async */ + createReadSession( + request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; createReadSession( request?: 
protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, optionsOrCallback?: @@ -448,6 +448,30 @@ export class BigQueryStorageClient { this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); } + /** + * Creates additional streams for a ReadSession. This API can be used to + * dynamically adjust the parallelism of a batch processing task upwards by + * adding additional workers. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} request.session + * Required. Must be a non-expired session obtained from a call to + * CreateReadSession. Only the name field needs to be set. + * @param {number} request.requestedStreams + * Required. Number of new streams requested. Must be positive. + * Number of added streams may be less than this, see CreateReadSessionRequest + * for more information. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async + */ batchCreateReadSessionStreams( request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, options?: CallOptions @@ -482,30 +506,6 @@ export class BigQueryStorageClient { {} | null | undefined > ): void; - /** - * Creates additional streams for a ReadSession. This API can be used to - * dynamically adjust the parallelism of a batch processing task upwards by - * adding additional workers. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} request.session - * Required. Must be a non-expired session obtained from a call to - * CreateReadSession. Only the name field needs to be set. - * @param {number} request.requestedStreams - * Required. Number of new streams requested. Must be positive. - * Number of added streams may be less than this, see CreateReadSessionRequest - * for more information. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example - * const [response] = await client.batchCreateReadSessionStreams(request); - */ batchCreateReadSessionStreams( request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, optionsOrCallback?: @@ -556,6 +556,36 @@ export class BigQueryStorageClient { callback ); } + /** + * Triggers the graceful termination of a single stream in a ReadSession. This + * API can be used to dynamically adjust the parallelism of a batch processing + * task downwards without losing data. + * + * This API does not delete the stream -- it remains visible in the + * ReadSession, and any data processed by the stream is not released to other + * streams. However, no additional data will be assigned to the stream once + * this call completes. Callers must continue reading data on the stream until + * the end of the stream is reached so that data which has already been + * assigned to the stream will be processed. + * + * This method will return an error if there are no other live streams + * in the Session, or if SplitReadStream() has been called on the given + * Stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream + * Required. Stream to finalize. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1beta1/big_query_storage.finalize_stream.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async + */ finalizeStream( request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, options?: CallOptions @@ -590,36 +620,6 @@ export class BigQueryStorageClient { {} | null | undefined > ): void; - /** - * Triggers the graceful termination of a single stream in a ReadSession. This - * API can be used to dynamically adjust the parallelism of a batch processing - * task downwards without losing data. - * - * This API does not delete the stream -- it remains visible in the - * ReadSession, and any data processed by the stream is not released to other - * streams. However, no additional data will be assigned to the stream once - * this call completes. Callers must continue reading data on the stream until - * the end of the stream is reached so that data which has already been - * assigned to the stream will be processed. - * - * This method will return an error if there are no other live streams - * in the Session, or if SplitReadStream() has been called on the given - * Stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream - * Required. Stream to finalize. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example - * const [response] = await client.finalizeStream(request); - */ finalizeStream( request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, optionsOrCallback?: @@ -666,40 +666,6 @@ export class BigQueryStorageClient { this.initialize(); return this.innerApiCalls.finalizeStream(request, options, callback); } - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - >; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; /** * Splits a given read stream into two Streams. These streams are referred to * as the primary and the residual of the split. The original stream can still @@ -734,9 +700,43 @@ export class BigQueryStorageClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
- * @example - * const [response] = await client.splitReadStream(request); + * @example include:samples/generated/v1beta1/big_query_storage.split_read_stream.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async */ + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; splitReadStream( request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, optionsOrCallback?: @@ -808,10 +808,8 @@ export class BigQueryStorageClient { * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) * for more details and examples. - * @example - * const stream = client.readRows(request); - * stream.on('data', (response) => { ... }); - * stream.on('end', () => { ... 
}); + * @example include:samples/generated/v1beta1/big_query_storage.read_rows.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async */ readRows( request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, From ca7635c5577d005d05129672cdc53c03a16bee3c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Dec 2021 22:52:22 +0000 Subject: [PATCH 156/333] build: add generated samples to .eslintignore (#229) --- handwritten/bigquery-storage/.eslintignore | 1 + handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.eslintignore b/handwritten/bigquery-storage/.eslintignore index 9340ad9b86d..ea5b04aebe6 100644 --- a/handwritten/bigquery-storage/.eslintignore +++ b/handwritten/bigquery-storage/.eslintignore @@ -4,3 +4,4 @@ test/fixtures build/ docs/ protos/ +samples/generated/ diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 8d0a479d477..8a63b10a9dd 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:bbb8dd6576ac58830a07fc17e9511ae898be44f2219d3344449b125df9854441 + digest: sha256:ba3f2990fefe465f89834e4c46f847ddb141afa54daa6a1d462928fa679ed143 From 430298000a0f043911a1b2a22dee013ceaf916c8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Dec 2021 19:56:15 +0000 Subject: [PATCH 157/333] docs(node): support "stable"/"preview" release level (#1312) (#234) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/README.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git 
a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 8a63b10a9dd..1b6a76cc483 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:ba3f2990fefe465f89834e4c46f847ddb141afa54daa6a1d462928fa679ed143 + digest: sha256:5ed10ba99cd1ea8c3a0f29b4c53e8a2723a101952705baed6b61783111c64c1c diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index f7f2caa856b..939e52d27fc 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -229,6 +229,8 @@ are addressed with the highest priority. + + More Information: [Google Cloud Platform Launch Stages][launch_stages] [launch_stages]: https://cloud.google.com/terms/launch-stages From 0375433c1afb7b676e06dcb86fac85d596a0af82 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Dec 2021 20:40:10 +0000 Subject: [PATCH 158/333] feat: add write_mode support for BigQuery Storage Write API v1 (#228) Committer: @anahan PiperOrigin-RevId: 414771198 Source-Link: https://github.com/googleapis/googleapis/commit/8a2398e34424ba7368a5195882386b16cd1b076c Source-Link: https://github.com/googleapis/googleapis-gen/commit/9df7ea386b59d5cc397a392498d3c1a5e5a673cb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWRmN2VhMzg2YjU5ZDVjYzM5N2EzOTI0OThkM2MxYTVlNWE2NzNjYiJ9 See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Steffany Brown <30247553+steffnay@users.noreply.github.com> Co-authored-by: Benjamin E. 
Coe --- .../cloud/bigquery/storage/v1/stream.proto | 13 +++++ .../bigquery-storage/protos/protos.d.ts | 12 +++++ handwritten/bigquery-storage/protos/protos.js | 49 +++++++++++++++++++ .../bigquery-storage/protos/protos.json | 13 +++++ 4 files changed, 87 insertions(+) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index dc62e8c00ca..0b0bc1ad05b 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -171,6 +171,16 @@ message WriteStream { BUFFERED = 3; } + // Mode enum of the stream. + enum WriteMode { + // Unknown type. + WRITE_MODE_UNSPECIFIED = 0; + + // Insert new records into the table. + // It is the default value if customers do not specify it. + INSERT = 1; + } + // Output only. Name of the stream, in the form // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -193,4 +203,7 @@ message WriteStream { // compatible with this schema to send in initial `AppendRowsRequest`. // The table schema could go out of date during the life time of the stream. TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Mode of the stream. 
+ WriteMode write_mode = 7 [(google.api.field_behavior) = IMMUTABLE]; } diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index ba247175654..a113f0d4da9 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -3447,6 +3447,9 @@ export namespace google { /** WriteStream tableSchema */ tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** WriteStream writeMode */ + writeMode?: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null); } /** Represents a WriteStream. */ @@ -3473,6 +3476,9 @@ export namespace google { /** WriteStream tableSchema. */ public tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + /** WriteStream writeMode. */ + public writeMode: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode); + /** * Creates a new WriteStream instance using the specified properties. * @param [properties] Properties to set @@ -3553,6 +3559,12 @@ export namespace google { PENDING = 2, BUFFERED = 3 } + + /** WriteMode enum. */ + enum WriteMode { + WRITE_MODE_UNSPECIFIED = 0, + INSERT = 1 + } } /** Properties of a TableSchema. 
*/ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index e74a907a0bf..83ac3676d44 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -7913,6 +7913,7 @@ * @property {google.protobuf.ITimestamp|null} [createTime] WriteStream createTime * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [tableSchema] WriteStream tableSchema + * @property {google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null} [writeMode] WriteStream writeMode */ /** @@ -7970,6 +7971,14 @@ */ WriteStream.prototype.tableSchema = null; + /** + * WriteStream writeMode. + * @member {google.cloud.bigquery.storage.v1.WriteStream.WriteMode} writeMode + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.writeMode = 0; + /** * Creates a new WriteStream instance using the specified properties. * @function create @@ -8004,6 +8013,8 @@ $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.tableSchema != null && Object.hasOwnProperty.call(message, "tableSchema")) $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.writeMode != null && Object.hasOwnProperty.call(message, "writeMode")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.writeMode); return writer; }; @@ -8053,6 +8064,9 @@ case 5: message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); break; + case 7: + message.writeMode = reader.int32(); + break; default: reader.skipType(tag & 7); break; @@ -8116,6 +8130,14 @@ if (error) return "tableSchema." 
+ error; } + if (message.writeMode != null && message.hasOwnProperty("writeMode")) + switch (message.writeMode) { + default: + return "writeMode: enum value expected"; + case 0: + case 1: + break; + } return null; }; @@ -8166,6 +8188,16 @@ throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.tableSchema: object expected"); message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.tableSchema); } + switch (object.writeMode) { + case "WRITE_MODE_UNSPECIFIED": + case 0: + message.writeMode = 0; + break; + case "INSERT": + case 1: + message.writeMode = 1; + break; + } return message; }; @@ -8188,6 +8220,7 @@ object.createTime = null; object.commitTime = null; object.tableSchema = null; + object.writeMode = options.enums === String ? "WRITE_MODE_UNSPECIFIED" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -8199,6 +8232,8 @@ object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) object.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.tableSchema, options); + if (message.writeMode != null && message.hasOwnProperty("writeMode")) + object.writeMode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] : message.writeMode; return object; }; @@ -8231,6 +8266,20 @@ return values; })(); + /** + * WriteMode enum. 
+ * @name google.cloud.bigquery.storage.v1.WriteStream.WriteMode + * @enum {number} + * @property {number} WRITE_MODE_UNSPECIFIED=0 WRITE_MODE_UNSPECIFIED value + * @property {number} INSERT=1 INSERT value + */ + WriteStream.WriteMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "WRITE_MODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "INSERT"] = 1; + return values; + })(); + return WriteStream; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 4feebdb2d72..651ae7e0329 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -835,6 +835,13 @@ "options": { "(google.api.field_behavior)": "OUTPUT_ONLY" } + }, + "writeMode": { + "type": "WriteMode", + "id": 7, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } } }, "nested": { @@ -845,6 +852,12 @@ "PENDING": 2, "BUFFERED": 3 } + }, + "WriteMode": { + "values": { + "WRITE_MODE_UNSPECIFIED": 0, + "INSERT": 1 + } } } }, From 76a04f7483d26b9cbc5f643514090009ce33a3a8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Dec 2021 20:48:11 +0000 Subject: [PATCH 159/333] chore: release 2.8.0 (#235) :robot: I have created a release \*beep\* \*boop\* --- ## [2.8.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.7.0...v2.8.0) (2021-12-30) ### Features * add write_mode support for BigQuery Storage Write API v1 ([#228](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/228)) ([18f3123](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/18f3123275716d49460f77cbbc1a4547412087d2)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 8d10cad8bf0..7f8d97f132f 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.8.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.7.0...v2.8.0) (2021-12-30) + + +### Features + +* add write_mode support for BigQuery Storage Write API v1 ([#228](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/228)) ([18f3123](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/18f3123275716d49460f77cbbc1a4547412087d2)) + ## [2.7.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.2...v2.7.0) (2021-09-27) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a890bee3780..6c37a661de2 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.7.0", + "version": "2.8.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", From 4c6b667f5e4ad84819ca0bd1c6a300e438a904bb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Dec 2021 23:18:30 +0000 Subject: [PATCH 160/333] docs(badges): tweak badge to use new preview/stable language (#1314) (#236) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/README.md | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 1b6a76cc483..497345b83de 100644 --- 
a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:5ed10ba99cd1ea8c3a0f29b4c53e8a2723a101952705baed6b61783111c64c1c + digest: sha256:f092066de33d4a2a13ab13c8fa9dcb4f6b96fa1fb7d391bf19cd0c4921d997c0 diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 939e52d27fc..103d0dd88a7 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -6,7 +6,6 @@ [![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) [![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) -[![codecov](https://img.shields.io/codecov/c/github/googleapis/nodejs-bigquery-storage/main.svg?style=flat)](https://codecov.io/gh/googleapis/nodejs-bigquery-storage) From 329b3e8531315bdf1789b7e2a77bd0bdbccaf2fd Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 30 Dec 2021 18:48:17 -0500 Subject: [PATCH 161/333] chore: add api_shortname and library_type to repo metadata (#233) Update .repo-metadata.json as required by go/library-data-integrity --- handwritten/bigquery-storage/.repo-metadata.json | 6 ++++-- handwritten/bigquery-storage/README.md | 9 ++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index 4283f1fbab0..6d5e38471a7 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -1,6 +1,6 @@ { "distribution_name": "@google-cloud/bigquery-storage", - "release_level": "ga", + "release_level": "stable", "product_documentation": 
"https://cloud.google.com/bigquery/docs/reference/storage", "repo": "googleapis/nodejs-bigquery-storage", "default_version": "v1", @@ -11,5 +11,7 @@ "name": "bigquerystorage", "name_pretty": "Google BigQuery Storage", "api_id": "bigquerystorage.googleapis.com", - "codeowner_team": "@googleapis/api-bigquery" + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquerystorage", + "library_type": "GAPIC_AUTO" } diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 103d0dd88a7..8b0a9a7982b 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -4,7 +4,7 @@ # [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/nodejs-bigquery-storage) -[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) [![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) @@ -218,10 +218,10 @@ _Legacy Node.js versions are supported as a best effort:_ This library follows [Semantic Versioning](http://semver.org/). -This library is considered to be **General Availability (GA)**. This means it -is stable; the code surface will not change in backwards-incompatible ways + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways unless absolutely necessary (e.g. because of critical security issues) or with -an extensive deprecation period. Issues and requests against **GA** libraries +an extensive deprecation period. Issues and requests against **stable** libraries are addressed with the highest priority. @@ -229,7 +229,6 @@ are addressed with the highest priority. 
- More Information: [Google Cloud Platform Launch Stages][launch_stages] [launch_stages]: https://cloud.google.com/terms/launch-stages From f1112aaad388ca1add315bb8adfe1b155f2109d8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 17:08:25 +0000 Subject: [PATCH 162/333] test(nodejs): remove 15 add 16 (#1322) (#238) --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 497345b83de..6831fd8e18c 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:f092066de33d4a2a13ab13c8fa9dcb4f6b96fa1fb7d391bf19cd0c4921d997c0 + digest: sha256:3563b6b264989c4f5aa31a3682e4df36c95756cfef275d3201508947cbfc511e diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index a113f0d4da9..12ded42b9a9 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 83ac3676d44..930ac6091f0 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From f2ff6148e026703831d47c4a978e6ec46af4d9bc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 19:24:21 +0000 Subject: [PATCH 163/333] chore: update github issue templates (#1085) (#240) --- .../bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- .../.github/ISSUE_TEMPLATE/bug_report.md | 2 +- .../.github/ISSUE_TEMPLATE/config.yml | 4 ++++ .../.github/ISSUE_TEMPLATE/feature_request.md | 2 +- .../.github/ISSUE_TEMPLATE/question.md | 12 ++++++++++++ 5 files changed, 19 insertions(+), 3 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 6831fd8e18c..cbbb175848c 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:3563b6b264989c4f5aa31a3682e4df36c95756cfef275d3201508947cbfc511e + digest: sha256:2d850512335d7adca3a4b08e02f8e63192978aea88c042dacb3e382aa996ae7c diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md index 3902f23b2d7..0ad95022413 100644 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md 
+++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,7 +1,7 @@ --- name: Bug report about: Create a report to help us improve - +labels: 'type: bug, priority: p2' --- Thanks for stopping by to let us know something could be better! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..603b90133b6 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,4 @@ +contact_links: + - name: Google Cloud Support + url: https://cloud.google.com/support/ + about: If you have a support contract with Google, please use the Google Cloud Support portal. diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md index 6365857f33c..b0327dfa02e 100644 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea for this library - +labels: 'type: feature request, priority: p3' --- Thanks for stopping by to let us know something could be better! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000000..97323113911 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,12 @@ +--- +name: Question +about: Ask a question +labels: 'type: question, priority: p3' +--- + +Thanks for stopping by to ask us a question! 
Please make sure to include: +- What you're trying to do +- What code you've already tried +- Any error messages you're getting + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. From a8ba9906857bf622da5396843ccb7def9b96e6e7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 17:58:31 +0000 Subject: [PATCH 164/333] build(node): switch back to keystore for publication (#1328) (#243) --- .../bigquery-storage/.github/.OwlBot.lock.yaml | 2 +- handwritten/bigquery-storage/.kokoro/publish.sh | 2 +- .../bigquery-storage/.kokoro/release/publish.cfg | 11 ++++++++++- handwritten/bigquery-storage/README.md | 4 ++++ 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index cbbb175848c..2c37ca7a7b2 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:2d850512335d7adca3a4b08e02f8e63192978aea88c042dacb3e382aa996ae7c + digest: sha256:89c5b2f3decec8ad64febbebea671076c119d1ab43700da380846a315600de8a diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index 4db6bf1c7f5..77a5defb2b5 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -24,7 +24,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / cd $(dirname $0)/.. 
-NPM_TOKEN=$(cat $KOKORO_GFILE_DIR/secret_manager/npm_publish_token) +NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc npm install diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 6e84cf465c9..ba6547f468f 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -7,9 +7,18 @@ before_action { } } +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-npm-token-1" + } + } +} + env_vars: { key: "SECRET_MANAGER_KEYS" - value: "npm_publish_token,releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } # Download trampoline resources. 
diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 8b0a9a7982b..ed71e35db22 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -185,7 +185,11 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | +| Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | +| Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | +| Customer_record_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/customer_record_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/customer_record_pb.js,samples/README.md) | | BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Sample_data_pb2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb2.js) 
| [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb2.js,samples/README.md) | From 1ccebc32a514855e3b094d80deac58b1d0ad28e6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 Jan 2022 20:38:51 +0100 Subject: [PATCH 165/333] chore(deps): update dependency gts to v3 (#239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update dependency gts to v3 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Steffany Brown <30247553+steffnay@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6c37a661de2..02692b0ed74 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -34,7 +34,7 @@ "@types/node": "^16.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", - "gts": "^2.0.0", + "gts": "^3.0.0", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", From 9ab5513ab360b0ed8ffd2f11ed34147460fe7b4a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 11:49:05 -0800 Subject: [PATCH 166/333] chore(deps): upgrade gapic-generator-java to 2.4.1 (#242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): upgrade gapic-generator-java to 2.4.1 PiperOrigin-RevId: 422607515 Source-Link: https://github.com/googleapis/googleapis/commit/ba2ffd6fe6642e28b4fed2ffae217b4c5f084034 Source-Link: https://github.com/googleapis/googleapis-gen/commit/73ba4add239a619da567ffbd4e5730fdd6de04d3 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzNiYTRhZGQyMzlhNjE5ZGE1NjdmZmJkNGU1NzMwZmRkNmRlMDRkMyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Steffany Brown <30247553+steffnay@users.noreply.github.com> --- handwritten/bigquery-storage/.jsdoc.js | 4 ++-- .../v1/big_query_read.create_read_session.js | 1 + .../generated/v1/big_query_read.read_rows.js | 13 ++++--------- .../v1/big_query_read.split_read_stream.js | 1 + .../generated/v1/big_query_write.append_rows.js | 15 +++++---------- ...big_query_write.batch_commit_write_streams.js | 1 + .../v1/big_query_write.create_write_stream.js | 1 + .../v1/big_query_write.finalize_write_stream.js | 1 + .../generated/v1/big_query_write.flush_rows.js | 1 + .../v1/big_query_write.get_write_stream.js | 1 + ..._storage.batch_create_read_session_streams.js | 4 ++-- .../big_query_storage.create_read_session.js | 4 ++-- .../v1beta1/big_query_storage.finalize_stream.js | 4 ++-- .../v1beta1/big_query_storage.read_rows.js | 16 +++++----------- .../big_query_storage.split_read_stream.js | 4 ++-- .../src/v1/big_query_read_client.ts | 2 +- .../src/v1/big_query_write_client.ts | 2 +- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../src/v1beta1/big_query_storage_client.ts | 2 +- .../bigquery-storage/src/v1beta1/index.ts | 2 +- .../system-test/fixtures/sample/src/index.js | 2 +- .../system-test/fixtures/sample/src/index.ts | 2 +- .../bigquery-storage/system-test/install.ts | 2 +- .../test/gapic_big_query_read_v1.ts | 2 +- .../test/gapic_big_query_storage_v1beta1.ts | 2 +- .../test/gapic_big_query_write_v1.ts | 2 +- 26 files changed, 42 insertions(+), 51 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 9451ad92fe9..21870f2a75f 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google 
LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2021 Google LLC', + copyright: 'Copyright 2022 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index 6514bcca250..a14c16b7ba0 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(parent, readSession) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index c853e64dd0f..a51ad69dd47 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(readStream) { @@ -44,15 +45,9 @@ function main(readStream) { // Run request const stream = await storageClient.readRows(request); - stream.on('data', response => { - console.log(response); - }); - stream.on('error', err => { - throw err; - }); - stream.on('end', () => { - /* API call completed */ - }); + stream.on('data', (response) => { console.log(response) }); + stream.on('error', (err) => { throw(err) }); + stream.on('end', () => { /* API call completed */ }); } callReadRows(); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index 12c8a99dbd9..478f78e1e90 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(name) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index f48ff733f78..51db4c07a0e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(writeStream) { @@ -61,17 +62,11 @@ function main(writeStream) { // Run request const stream = await storageClient.appendRows(); - stream.on('data', response => { - console.log(response); - }); - stream.on('error', err => { - throw err; - }); - stream.on('end', () => { - /* API call completed */ - }); + stream.on('data', (response) => { console.log(response) }); + stream.on('error', (err) => { throw(err) }); + stream.on('end', () => { /* API call completed */ }); stream.write(request); - stream.end(); + stream.end(); } callAppendRows(); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 9c290564f7d..a9f7ea031e7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(parent, writeStreams) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index 5e035e2afef..4537cb615f3 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(parent, writeStream) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index bca7849f860..bb660301138 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(name) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index e2280592c71..94f1f5253a9 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(writeStream) { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 4309dd091ea..4f2caa2a80c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(name) { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index 8a669bec643..ed64422d3a1 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(session, requestedStreams) { @@ -32,8 +33,7 @@ function main(session, requestedStreams) { // const requestedStreams = 1234 // Imports the Storage library - const {BigQueryStorageClient} = - require('@google-cloud/bigquery-storage').v1beta1; + const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage').v1beta1; // Instantiates a client const storageClient = new BigQueryStorageClient(); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index 11589c89dbb..9cf38ca4274 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(tableReference, parent) { @@ -57,8 +58,7 @@ function main(tableReference, parent) { // const shardingStrategy = {} // Imports the Storage library - const {BigQueryStorageClient} = - require('@google-cloud/bigquery-storage').v1beta1; + const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage').v1beta1; // Instantiates a client const storageClient = new BigQueryStorageClient(); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index fc71cd3eb7f..33ca0da184a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. + 'use strict'; function main(stream) { @@ -25,8 +26,7 @@ function main(stream) { // const stream = {} // Imports the Storage library - const {BigQueryStorageClient} = - require('@google-cloud/bigquery-storage').v1beta1; + const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage').v1beta1; // Instantiates a client const storageClient = new BigQueryStorageClient(); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index c80e30bed1e..5436f8f83fb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(readPosition) { @@ -27,8 +28,7 @@ function main(readPosition) { // const readPosition = {} // Imports the Storage library - const {BigQueryStorageClient} = - require('@google-cloud/bigquery-storage').v1beta1; + const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage').v1beta1; // Instantiates a client const storageClient = new BigQueryStorageClient(); @@ -41,15 +41,9 @@ function main(readPosition) { // Run request const stream = await storageClient.readRows(request); - stream.on('data', response => { - console.log(response); - }); - stream.on('error', err => { - throw err; - }); - stream.on('end', () => { - /* API call completed */ - }); + stream.on('data', (response) => { console.log(response) }); + stream.on('error', (err) => { throw(err) }); + stream.on('end', () => { /* API call completed */ }); } callReadRows(); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index f86ebf52e46..a517cdf5562 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+ 'use strict'; function main(originalStream) { @@ -35,8 +36,7 @@ function main(originalStream) { // const fraction = 1234 // Imports the Storage library - const {BigQueryStorageClient} = - require('@google-cloud/bigquery-storage').v1beta1; + const {BigQueryStorageClient} = require('@google-cloud/bigquery-storage').v1beta1; // Instantiates a client const storageClient = new BigQueryStorageClient(); diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 6c822fe16e2..f133143b2fe 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index c87b53c253d..b775bf6fe29 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index e1965026c9e..f3bacd94214 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 400ba6c1265..72b13b1ef43 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index 2c9b3f3f526..dc3afed8ea7 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 583e1053b26..d59c13c62cd 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index a5aa9bb198f..6fd6e3ca7ee 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index d2d61c0396f..6dd1eaadafa 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index eeb7c0e2cf2..a5c551d3198 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index b4821b15682..3365f629aee 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index e4ba9ae6bdc..608b649713f 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From 050d7ba661a1c9ec1d7fd677ec6cc2005f869169 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 22:08:31 +0000 Subject: [PATCH 167/333] docs(nodejs): version support policy edits (#1346) (#248) --- .../.github/.OwlBot.lock.yaml | 15 +++++++++++- handwritten/bigquery-storage/README.md | 24 +++++++++---------- 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 2c37ca7a7b2..84059c19485 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,3 +1,16 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:89c5b2f3decec8ad64febbebea671076c119d1ab43700da380846a315600de8a + digest: sha256:a9d166a74752226923d159cb723df53429e226c9c076dad3ca52ffd073ff3bb4 diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index ed71e35db22..40dfb1f6162 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -201,21 +201,21 @@ also contains samples. Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). Libraries are compatible with all current _active_ and _maintenance_ versions of Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. -Client libraries targeting some end-of-life versions of Node.js are available, and -can be installed via npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). -The dist-tags follow the naming convention `legacy-(version)`. - -_Legacy Node.js versions are supported as a best effort:_ +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: -* Legacy versions will not be tested in continuous integration. -* Some security patches may not be able to be backported. -* Dependencies will not be kept up-to-date, and features will not be backported. +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. 
+* Dependencies cannot be kept up-to-date. -#### Legacy tags available - -* `legacy-8`: install client libraries from this dist-tag for versions - compatible with Node.js 8. +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/bigquery-storage@legacy-8` installs client libraries +for versions compatible with Node.js 8. ## Versioning From 224caab20f5c908c99b2807f48f9a1e205459a7c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Feb 2022 18:38:56 +0100 Subject: [PATCH 168/333] chore(deps): update dependency sinon to v13 (#246) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [sinon](https://sinonjs.org/) ([source](https://togithub.com/sinonjs/sinon)) | [`^12.0.0` -> `^13.0.0`](https://renovatebot.com/diffs/npm/sinon/12.0.1/13.0.1) | [![age](https://badges.renovateapi.com/packages/npm/sinon/13.0.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/sinon/13.0.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/sinon/13.0.1/compatibility-slim/12.0.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/sinon/13.0.1/confidence-slim/12.0.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
sinonjs/sinon ### [`v13.0.1`](https://togithub.com/sinonjs/sinon/blob/HEAD/CHANGES.md#​1301) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v13.0.0...v13.0.1) - [`ec4223f9`](https://togithub.com/sinonjs/sinon/commit/ec4223f94076d809483e3c6a7536cfc1278dd3c9) Bump nise to fix [sinonjs/nise#​193](https://togithub.com/sinonjs/nise/issues/193) (Carl-Erik Kopseng) - [`f329a010`](https://togithub.com/sinonjs/sinon/commit/f329a01040bfa5a79e039419220b21eda56935d6) Add unimported to workflow ([#​2441](https://togithub.com/sinonjs/sinon/issues/2441)) (Morgan Roderick) - [`7f16cec9`](https://togithub.com/sinonjs/sinon/commit/7f16cec968c3c8d4e580267fb00195916d6f827d) Enable updates to same major version (Carl-Erik Kopseng) - [`f784d7ad`](https://togithub.com/sinonjs/sinon/commit/f784d7ad2c86be0fc65477d69f8bdafca846ef2c) Re-introduce new version.sh script to version hook (Joel Bradshaw) > This was inadvertently removed during merge conflicts, and is required > for any of the new release process stuff to work - [`51c508ab`](https://togithub.com/sinonjs/sinon/commit/51c508ab77cf0f9fb8c5305ff626f6a2eada178f) Add dry run mode to `npm version` ([#​2436](https://togithub.com/sinonjs/sinon/issues/2436)) (Joel Bradshaw) > - Add DRY_RUN flag to skip publish/push > > - Allow overriding branch names for testing - [`05341dcf`](https://togithub.com/sinonjs/sinon/commit/05341dcf92ddca4a1d4c90966b1fcdc7039cff18) Update npm version scripts to manage new releases branch (Joel Bradshaw) - [`fe658261`](https://togithub.com/sinonjs/sinon/commit/fe65826171db69ed2986a1060db77944dbc98a6d) Remove release archives from master (Joel Bradshaw) > These archives made it difficult to find things in the GitHub interface, > and take up a lot of space in a checked-out repo for something that is > not useful to most people checking out the repository. > > The main purpose of these archives is to make old versions and > documentation available on the Sinon website that is run out of this > repo. 
This can be supported by using a separate branch for website > releases, and to maintain the archives. > > Following this commit, the `npm version` scripts will be updated to > automatically handle archiving the releases in the new releases branch > and keeping it up to date with master. > > Also remove the directories we removed from .prettierignore, since they > don't exist any more. *Released by [Carl-Erik Kopseng](https://togithub.com/fatso83) on 2022-02-01.* ### [`v13.0.0`](https://togithub.com/sinonjs/sinon/blob/HEAD/CHANGES.md#​1300) [Compare Source](https://togithub.com/sinonjs/sinon/compare/v12.0.1...v13.0.0) - [`cf3d6c0c`](https://togithub.com/sinonjs/sinon/commit/cf3d6c0cd9689c0ee673b3daa8bf9abd70304392) Upgrade packages ([#​2431](https://togithub.com/sinonjs/sinon/issues/2431)) (Carl-Erik Kopseng) > - Update all @​sinonjs/ packages > > - Upgrade to fake-timers 9 > > - chore: ensure always using latest LTS release - [`41710467`](https://togithub.com/sinonjs/sinon/commit/417104670d575e96a1b645ea40ce763afa76fb1b) Adjust deploy scripts to archive old releases in a separate branch, move existing releases out of master ([#​2426](https://togithub.com/sinonjs/sinon/issues/2426)) (Joel Bradshaw) > Co-authored-by: Carl-Erik Kopseng - [`c80a7266`](https://togithub.com/sinonjs/sinon/commit/c80a72660e89d88b08275eff1028ecb9e26fd8e9) Bump node-fetch from 2.6.1 to 2.6.7 ([#​2430](https://togithub.com/sinonjs/sinon/issues/2430)) (dependabot\[bot]) > Co-authored-by: dependabot\[bot] <49699333+dependabot\[bot][@​users](https://togithub.com/users).noreply.github.com> - [`a00f14a9`](https://togithub.com/sinonjs/sinon/commit/a00f14a97dbe8c65afa89674e16ad73fc7d2fdc0) Add explicit export for `./*` ([#​2413](https://togithub.com/sinonjs/sinon/issues/2413)) (なつき) - [`b82ca7ad`](https://togithub.com/sinonjs/sinon/commit/b82ca7ad9b1add59007771f65a18ee34415de8ca) Bump cached-path-relative from 1.0.2 to 1.1.0 ([#​2428](https://togithub.com/sinonjs/sinon/issues/2428)) 
(dependabot\[bot]) - [`a9ea1427`](https://togithub.com/sinonjs/sinon/commit/a9ea142716c094ef3c432ecc4089f8207b8dd8b6) Add documentation for assert.calledOnceWithMatch ([#​2424](https://togithub.com/sinonjs/sinon/issues/2424)) (Mathias Schreck) - [`1d5ab86b`](https://togithub.com/sinonjs/sinon/commit/1d5ab86ba60e50dd69593ffed2bffd4b8faa0d38) Be more general in stripping off stack frames to fix Firefox tests ([#​2425](https://togithub.com/sinonjs/sinon/issues/2425)) (Joel Bradshaw) - [`56b06129`](https://togithub.com/sinonjs/sinon/commit/56b06129e223eae690265c37b1113067e2b31bdc) Check call count type ([#​2410](https://togithub.com/sinonjs/sinon/issues/2410)) (Joel Bradshaw) - [`7863e2df`](https://togithub.com/sinonjs/sinon/commit/7863e2dfdbda79e0a32e42af09e6539fc2f2b80f) Fix [#​2414](https://togithub.com/sinonjs/sinon/issues/2414): make Sinon available on homepage (Carl-Erik Kopseng) - [`fabaabdd`](https://togithub.com/sinonjs/sinon/commit/fabaabdda82f39a7f5b75b55bd56cf77b1cd4a8f) Bump nokogiri from 1.11.4 to 1.13.1 ([#​2423](https://togithub.com/sinonjs/sinon/issues/2423)) (dependabot\[bot]) - [`dbc0fbd2`](https://togithub.com/sinonjs/sinon/commit/dbc0fbd263c8419fa47f9c3b20cf47890a242d21) Bump shelljs from 0.8.4 to 0.8.5 ([#​2422](https://togithub.com/sinonjs/sinon/issues/2422)) (dependabot\[bot]) - [`fb8b3d72`](https://togithub.com/sinonjs/sinon/commit/fb8b3d72a85dc8fb0547f859baf3f03a22a039f7) Run Prettier (Carl-Erik Kopseng) - [`12a45939`](https://togithub.com/sinonjs/sinon/commit/12a45939e9b047b6d3663fe55f2eb383ec63c4e1) Fix 2377: Throw error when trying to stub non-configurable or non-writable properties ([#​2417](https://togithub.com/sinonjs/sinon/issues/2417)) (Stuart Dotson) > Fixes issue [#​2377](https://togithub.com/sinonjs/sinon/issues/2377) by throwing an error when trying to stub non-configurable or non-writable properties *Released by [Carl-Erik Kopseng](https://togithub.com/fatso83) on 2022-01-28.*
--- ### Configuration 📅 **Schedule**: "after 9am and before 3pm" (UTC). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 02692b0ed74..2790c6440f6 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -42,7 +42,7 @@ "mocha": "^8.0.0", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^12.0.0", + "sinon": "^13.0.0", "ts-loader": "^9.0.0", "typescript": "^3.8.3", "webpack": "^5.0.0", From f799cee800e9209ab9203339137049e1c290662d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 4 Apr 2022 18:32:11 +0000 Subject: [PATCH 169/333] chore: Enable Size-Label bot in all googleapis NodeJs repositories (#1382) (#253) * chore: Enable Size-Label bot in all googleapis NodeJs repositories Auto-label T-shirt size indicator should be assigned on every new pull request in all googleapis NodeJs repositories * Remove product Remove product since it is by default true Source-Link: https://github.com/googleapis/synthtool/commit/f1562fa1c219d7176f79e3eea611b268c361e93d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:bb4d47d0e770abad62699a4664ce6b9ff1629d50c276a6c75860a6a1853dd19b --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 3 ++- 
handwritten/bigquery-storage/.github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 handwritten/bigquery-storage/.github/auto-label.yaml diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 84059c19485..c6ddf44fb1c 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:a9d166a74752226923d159cb723df53429e226c9c076dad3ca52ffd073ff3bb4 + digest: sha256:bb4d47d0e770abad62699a4664ce6b9ff1629d50c276a6c75860a6a1853dd19b +# created: 2022-04-01T19:19:56.587347289Z diff --git a/handwritten/bigquery-storage/.github/auto-label.yaml b/handwritten/bigquery-storage/.github/auto-label.yaml new file mode 100644 index 00000000000..09c8d735b45 --- /dev/null +++ b/handwritten/bigquery-storage/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From 018a4630d831530307e20b673650763c3024b7e5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 15:44:23 +0000 Subject: [PATCH 170/333] chore(deps): update actions/setup-node action to v3 (#1393) (#254) Co-authored-by: Jeffrey Rennie Source-Link: https://github.com/googleapis/synthtool/commit/6593fb2234deff0444032cb2a91100bde4985caf Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:1d25dfefd805b689a2a2356d35a25b13f2f67bcce55400246432c43a42e96214 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index c6ddf44fb1c..ba38c131eba 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ 
b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:bb4d47d0e770abad62699a4664ce6b9ff1629d50c276a6c75860a6a1853dd19b -# created: 2022-04-01T19:19:56.587347289Z + digest: sha256:1d25dfefd805b689a2a2356d35a25b13f2f67bcce55400246432c43a42e96214 +# created: 2022-04-05T22:42:50.409517925Z From cb2d4f9d02bf3026b7e2b4e17d99e73d28682a35 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 20:18:25 +0000 Subject: [PATCH 171/333] chore(deps): update actions/checkout action to v3 (#1392) (#255) Co-authored-by: Jeffrey Rennie Source-Link: https://github.com/googleapis/synthtool/commit/9368bc795a376954920c374406e92efb0e3d0ac4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:f74e740638e66be7ced1540626217dbb72980eb73885b2339a70592f38c9ff2c --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index ba38c131eba..b4c08f9a4a2 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:1d25dfefd805b689a2a2356d35a25b13f2f67bcce55400246432c43a42e96214 -# created: 2022-04-05T22:42:50.409517925Z + digest: sha256:f74e740638e66be7ced1540626217dbb72980eb73885b2339a70592f38c9ff2c +# created: 2022-04-06T18:36:33.987617127Z From e7e28a8fbd7697c6b6f2bec507bdd7b763504eb9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 14:13:51 -0700 Subject: [PATCH 172/333] feat: Deprecate format specific `row_count` field in Read API (#249) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix!: remove bigquery.readonly auth scope PiperOrigin-RevId: 429331987 Source-Link: https://github.com/googleapis/googleapis/commit/4b2bd2637d6df992933b7437cf64f1c7d7b6ed9b Source-Link: https://github.com/googleapis/googleapis-gen/commit/fa17f03b5ae96316cd02f2997f2fd1196e034e5e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmExN2YwM2I1YWU5NjMxNmNkMDJmMjk5N2YyZmQxMTk2ZTAzNGU1ZSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs(samples): include metadata file, add exclusions for samples to handwritten libraries PiperOrigin-RevId: 429395631 Source-Link: https://github.com/googleapis/googleapis/commit/84594b35af0c38efcd6967e8179d801702ad96ff Source-Link: https://github.com/googleapis/googleapis-gen/commit/ed74f970fd82914874e6b27b04763cfa66bafe9b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZWQ3NGY5NzBmZDgyOTE0ODc0ZTZiMjdiMDQ3NjNjZmE2NmJhZmU5YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add trace_id for Read API PiperOrigin-RevId: 429809867 Source-Link: 
https://github.com/googleapis/googleapis/commit/60526031e76ff3ed1ec512896f9b8d1e55d6eb45 Source-Link: https://github.com/googleapis/googleapis-gen/commit/840dd14c91a0ec66d9bd1a028d01b5fd029ca02e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODQwZGQxNGM5MWEwZWM2NmQ5YmQxYTAyOGQwMWI1ZmQwMjljYTAyZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: improve documentation for write client feat: update default timeout/retry information feat: update parent annotation for BatchCommitWriteStreamsRequest feat: expose additional StorageError enum values PiperOrigin-RevId: 431973595 Source-Link: https://github.com/googleapis/googleapis/commit/25d691b074e0b932e1c3f8a690ced8ddec8ae50e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0ca21879ae3136a2d71fa824b49dfaf8a0ae0f2e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGNhMjE4NzlhZTMxMzZhMmQ3MWZhODI0YjQ5ZGZhZjhhMGFlMGYyZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update v2.14.1 gapic-generator-typescript Committer: @summer-ji-eng PiperOrigin-RevId: 433031262 Source-Link: https://github.com/googleapis/googleapis/commit/2a55d13d9dc6cbf99990d1ab490e0ea07b5e7649 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2fce3893ae9da47763e0872c4a3a87d9ff78771f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZjZTM4OTNhZTlkYTQ3NzYzZTA4NzJjNGEzYTg3ZDlmZjc4NzcxZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update v2.14.2 gapic-generator-typescript Committer: @summer-ji-eng PiperOrigin-RevId: 434859890 Source-Link: 
https://github.com/googleapis/googleapis/commit/bc2432d50cba657e95212122e3fa112591b5bec2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/930b673103e92523f8cfed38decd7d3afae8ebe7 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTMwYjY3MzEwM2U5MjUyM2Y4Y2ZlZDM4ZGVjZDdkM2FmYWU4ZWJlNyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Deprecate format specific `row_count` field in Read API PiperOrigin-RevId: 438434001 Source-Link: https://github.com/googleapis/googleapis/commit/727f08ba466745c17f71cd2084540878392de31d Source-Link: https://github.com/googleapis/googleapis-gen/commit/2d62ab0d3d650b7aba4ec5e5a96dd8cdbae89fb7 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmQ2MmFiMGQzZDY1MGI3YWJhNGVjNWU1YTk2ZGQ4Y2RiYWU4OWZiNyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Steffany Brown <30247553+steffnay@users.noreply.github.com> Co-authored-by: Benjamin E. 
Coe Co-authored-by: Jeffrey Rennie --- .../bigquery-storage/linkinator.config.json | 10 +- .../cloud/bigquery/storage/v1/arrow.proto | 7 +- .../cloud/bigquery/storage/v1/avro.proto | 7 +- .../cloud/bigquery/storage/v1/protobuf.proto | 2 +- .../cloud/bigquery/storage/v1/storage.proto | 27 +- .../cloud/bigquery/storage/v1/stream.proto | 10 +- .../cloud/bigquery/storage/v1/table.proto | 2 +- .../bigquery/storage/v1beta1/storage.proto | 1 - .../bigquery-storage/protos/protos.d.ts | 10 +- handwritten/bigquery-storage/protos/protos.js | 36 ++ .../bigquery-storage/protos/protos.json | 28 +- .../v1/big_query_read.create_read_session.js | 9 +- .../generated/v1/big_query_read.read_rows.js | 9 +- .../v1/big_query_read.split_read_stream.js | 9 +- .../v1/big_query_write.append_rows.js | 13 +- ..._query_write.batch_commit_write_streams.js | 9 +- .../v1/big_query_write.create_write_stream.js | 9 +- .../big_query_write.finalize_write_stream.js | 9 +- .../v1/big_query_write.flush_rows.js | 9 +- .../v1/big_query_write.get_write_stream.js | 9 +- ...data.google.cloud.bigquery.storage.v1.json | 415 ++++++++++++++++++ ...orage.batch_create_read_session_streams.js | 9 +- .../big_query_storage.create_read_session.js | 9 +- .../big_query_storage.finalize_stream.js | 9 +- .../v1beta1/big_query_storage.read_rows.js | 9 +- .../big_query_storage.split_read_stream.js | 9 +- ...google.cloud.bigquery.storage.v1beta1.json | 247 +++++++++++ .../src/v1/big_query_read_client.ts | 27 +- .../src/v1/big_query_write_client.ts | 33 +- .../src/v1beta1/big_query_storage_client.ts | 27 +- .../test/gapic_big_query_read_v1.ts | 79 +++- .../test/gapic_big_query_storage_v1beta1.ts | 119 ++++- .../test/gapic_big_query_write_v1.ts | 103 ++++- 33 files changed, 1239 insertions(+), 81 deletions(-) create mode 100644 handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json create mode 100644 
handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json index 0121dfa684f..befd23c8633 100644 --- a/handwritten/bigquery-storage/linkinator.config.json +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -3,8 +3,14 @@ "skip": [ "https://codecov.io/gh/googleapis/", "www.googleapis.com", - "img.shields.io" + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" ], "silent": true, - "concurrency": 5 + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 514b77e617c..6d3f6080bf6 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -39,8 +39,9 @@ message ArrowRecordBatch { // IPC-serialized Arrow RecordBatch. bytes serialized_record_batch = 1; - // The count of rows in `serialized_record_batch`. - int64 row_count = 2; + // [Deprecated] The count of rows in `serialized_record_batch`. + // Please use the format-independent ReadRowsResponse.row_count instead. + int64 row_count = 2 [deprecated = true]; } // Contains options specific to Arrow Serialization. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index dee4a6ed229..15de2db5468 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -35,6 +35,7 @@ message AvroRows { // Binary serialized rows in a block. bytes serialized_binary_rows = 1; - // The count of rows in the returning block. - int64 row_count = 2; + // [Deprecated] The count of rows in the returning block. + // Please use the format-independent ReadRowsResponse.row_count instead. + int64 row_count = 2 [deprecated = true]; } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto index f987467dd9f..b3754acf7b3 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index ab5a46cf180..67c6c8a0295 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -47,7 +47,6 @@ service BigQueryRead { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/bigquery.readonly," "https://www.googleapis.com/auth/cloud-platform"; // Creates a new read session. A read session divides the contents of a @@ -168,6 +167,13 @@ service BigQueryWrite { // * For PENDING streams, data is not made visible until the stream itself is // finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly // committed via the `BatchCommitWriteStreams` rpc. + // + // Note: For users coding against the gRPC api directly, it may be + // necessary to supply the x-goog-request-params system parameter + // with `write_stream=`. + // + // More information about system parameters: + // https://cloud.google.com/apis/docs/system-parameters rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) { option (google.api.http) = { post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" @@ -409,10 +415,12 @@ message AppendRowsRequest { // request. 
// // For explicitly created write streams, the format is: - // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` + // + // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` // // For the special default stream, the format is: - // `projects/{project}/datasets/{dataset}/tables/{table}/_default`. + // + // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. string write_stream = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -499,7 +507,10 @@ message BatchCommitWriteStreamsRequest { // Required. Parent table that all the streams should belong to, in the form of // `projects/{project}/datasets/{dataset}/tables/{table}`. string parent = 1 [ - (google.api.field_behavior) = REQUIRED + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } ]; // Required. The group of streams that will be committed atomically. @@ -594,6 +605,12 @@ message StorageError { // There is a schema mismatch and it is caused by user schema has extra // field than bigquery schema. SCHEMA_MISMATCH_EXTRA_FIELDS = 7; + + // Offset already exists. + OFFSET_ALREADY_EXISTS = 8; + + // Offset out of range. + OFFSET_OUT_OF_RANGE = 9; } // BigQuery Storage specific error code. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 0b0bc1ad05b..bd1fa2ce98a 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -132,6 +132,14 @@ message ReadSession { // all streams are completely consumed. This estimate is based on // metadata from the table which might be incomplete or stale. int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. ID set by client to annotate a session identity. This does not need + // to be strictly unique, but instead the same ID should be used to group + // logically connected sessions (e.g. All using the same ID for all sessions + // needed to complete a Spark SQL query is reasonable). + // + // Maximum length is 256 bytes. + string trace_id = 13 [(google.api.field_behavior) = OPTIONAL]; } // Information about a single stream that gets data out of the storage system. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index a8c6f844df5..545f6292712 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -1,4 +1,4 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 81e77c73af5..0d311418a49 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -37,7 +37,6 @@ service BigQueryStorage { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/bigquery.readonly," "https://www.googleapis.com/auth/cloud-platform"; // Creates a new read session. A read session divides the contents of a diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 12ded42b9a9..a82f40762cd 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -2984,7 +2984,9 @@ export namespace google { INVALID_STREAM_TYPE = 4, INVALID_STREAM_STATE = 5, STREAM_FINALIZED = 6, - SCHEMA_MISMATCH_EXTRA_FIELDS = 7 + SCHEMA_MISMATCH_EXTRA_FIELDS = 7, + OFFSET_ALREADY_EXISTS = 8, + OFFSET_OUT_OF_RANGE = 9 } } @@ -3027,6 +3029,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned */ estimatedTotalBytesScanned?: (number|Long|string|null); + + /** ReadSession traceId */ + traceId?: (string|null); } /** Represents a ReadSession. */ @@ -3068,6 +3073,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned. */ public estimatedTotalBytesScanned: (number|Long|string); + /** ReadSession traceId. */ + public traceId: string; + /** ReadSession schema. 
*/ public schema?: ("avroSchema"|"arrowSchema"); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 930ac6091f0..1d0989c2671 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -6611,6 +6611,8 @@ case 5: case 6: case 7: + case 8: + case 9: break; } if (message.entity != null && message.hasOwnProperty("entity")) @@ -6667,6 +6669,14 @@ case 7: message.code = 7; break; + case "OFFSET_ALREADY_EXISTS": + case 8: + message.code = 8; + break; + case "OFFSET_OUT_OF_RANGE": + case 9: + message.code = 9; + break; } if (object.entity != null) message.entity = String(object.entity); @@ -6725,6 +6735,8 @@ * @property {number} INVALID_STREAM_STATE=5 INVALID_STREAM_STATE value * @property {number} STREAM_FINALIZED=6 STREAM_FINALIZED value * @property {number} SCHEMA_MISMATCH_EXTRA_FIELDS=7 SCHEMA_MISMATCH_EXTRA_FIELDS value + * @property {number} OFFSET_ALREADY_EXISTS=8 OFFSET_ALREADY_EXISTS value + * @property {number} OFFSET_OUT_OF_RANGE=9 OFFSET_OUT_OF_RANGE value */ StorageError.StorageErrorCode = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -6736,6 +6748,8 @@ values[valuesById[5] = "INVALID_STREAM_STATE"] = 5; values[valuesById[6] = "STREAM_FINALIZED"] = 6; values[valuesById[7] = "SCHEMA_MISMATCH_EXTRA_FIELDS"] = 7; + values[valuesById[8] = "OFFSET_ALREADY_EXISTS"] = 8; + values[valuesById[9] = "OFFSET_OUT_OF_RANGE"] = 9; return values; })(); @@ -6774,6 +6788,7 @@ * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions * @property {Array.|null} [streams] ReadSession streams * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned + * @property {string|null} [traceId] ReadSession traceId */ /** @@ -6872,6 +6887,14 @@ */ ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? 
$util.Long.fromBits(0,0,false) : 0; + /** + * ReadSession traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.traceId = ""; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -6931,6 +6954,8 @@ $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 13, wireType 2 =*/106).string(message.traceId); return writer; }; @@ -6997,6 +7022,9 @@ case 12: message.estimatedTotalBytesScanned = reader.int64(); break; + case 13: + message.traceId = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -7093,6 +7121,9 @@ if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) return "estimatedTotalBytesScanned: integer|Long expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; @@ -7170,6 +7201,8 @@ message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; else if (typeof object.estimatedTotalBytesScanned === "object") message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); + if (object.traceId != null) + message.traceId = String(object.traceId); return message; }; @@ -7200,6 
+7233,7 @@ object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else object.estimatedTotalBytesScanned = options.longs === String ? "0" : 0; + object.traceId = ""; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -7233,6 +7267,8 @@ object.estimatedTotalBytesScanned = options.longs === String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; else object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 651ae7e0329..a892cd68782 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -36,7 +36,10 @@ }, "rowCount": { "type": "int64", - "id": 2 + "id": 2, + "options": { + "deprecated": true + } } } }, @@ -73,7 +76,10 @@ }, "rowCount": { "type": "int64", - "id": 2 + "id": 2, + "options": { + "deprecated": true + } } } }, @@ -97,7 +103,7 @@ "BigQueryRead": { "options": { "(google.api.default_host)": "bigquerystorage.googleapis.com", - "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" }, "methods": { "CreateReadSession": { @@ -548,7 +554,8 @@ "type": "string", "id": 1, "options": { - "(google.api.field_behavior)": "REQUIRED" 
+ "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" } }, "writeStreams": { @@ -643,7 +650,9 @@ "INVALID_STREAM_TYPE": 4, "INVALID_STREAM_STATE": 5, "STREAM_FINALIZED": 6, - "SCHEMA_MISMATCH_EXTRA_FIELDS": 7 + "SCHEMA_MISMATCH_EXTRA_FIELDS": 7, + "OFFSET_ALREADY_EXISTS": 8, + "OFFSET_OUT_OF_RANGE": 9 } } } @@ -740,6 +749,13 @@ "options": { "(google.api.field_behavior)": "OUTPUT_ONLY" } + }, + "traceId": { + "type": "string", + "id": 13, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } }, "nested": { @@ -1026,7 +1042,7 @@ "BigQueryStorage": { "options": { "(google.api.default_host)": "bigquerystorage.googleapis.com", - "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" }, "methods": { "CreateReadSession": { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index a14c16b7ba0..f4d2f95025e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index a51ad69dd47..f5f781a46b8 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index 478f78e1e90..6e146955b95 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 51db4c07a0e..9cefbb22166 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; @@ -26,9 +31,9 @@ function main(writeStream) { * If provided for subsequent requests, it must match the value of the first * request. * For explicitly created write streams, the format is: - * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` + * * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` * For the special default stream, the format is: - * `projects/{project}/datasets/{dataset}/tables/{table}/_default`. + * * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. */ // const writeStream = 'abc123' /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index a9f7ea031e7..9c258a30082 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index 4537cb615f3..839a8bf6628 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index bb660301138..3ff3da7adc8 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index 94f1f5253a9..751fdebf6e0 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 4f2caa2a80c..1650750820f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json new file mode 100644 index 00000000000..a847f73832d --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -0,0 +1,415 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1", + "version": "v1" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", + "title": "BigQueryRead createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. 
Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_read.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 66, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_session", + "type": ".google.cloud.bigquery.storage.v1.ReadSession" + }, + { + "name": "max_stream_count", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadSession", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", + "title": "BigQueryRead readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. 
Each request also returns a set of stream statistics reflecting the current state of the stream.", + "canonical": true, + "file": "big_query_read.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": "TYPE_INT64" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", + "title": "BigQueryRead splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. 
Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.", + "canonical": true, + "file": "big_query_read.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 60, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "fraction", + "type": "TYPE_DOUBLE" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", + "title": "BigQueryRead createWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. 
Data written to this stream is considered committed as soon as an acknowledgement is received.", + "canonical": true, + "file": "big_query_write.create_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_stream", + "type": ".google.cloud.bigquery.storage.v1.WriteStream" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", + "title": "BigQueryRead appendRows Sample", + "origin": "API_DEFINITION", + "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. 
Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc. Note: For users coding against the gRPC api directly, it may be necessary to supply the x-goog-request-params system parameter with `write_stream=`. More information about system parameters: https://cloud.google.com/apis/docs/system-parameters", + "canonical": true, + "file": "big_query_write.append_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 77, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + }, + { + "name": "proto_rows", + "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + 
"regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", + "title": "BigQueryRead getWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Gets information about a write stream.", + "canonical": true, + "file": "big_query_write.get_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", + "title": "BigQueryRead finalizeWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Finalize a write stream so that no new data can be appended to the stream. 
Finalize is not supported on the '_default' stream.", + "canonical": true, + "file": "big_query_write.finalize_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", + "title": "BigQueryRead batchCommitWriteStreams Sample", + "origin": "API_DEFINITION", + "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. 
Once a stream is committed, data in the stream becomes available for read operations.", + "canonical": true, + "file": "big_query_write.batch_commit_write_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_streams", + "type": "TYPE_STRING[]" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", + "title": "BigQueryRead flushRows Sample", + "origin": "API_DEFINITION", + "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. 
Flush is not supported on the _default stream, since it is not BUFFERED.", + "canonical": true, + "file": "big_query_write.flush_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + } + ] +} diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index ed64422d3a1..3d7b6d3e429 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index 9cf38ca4274..c71a464ce5d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index 33ca0da184a..51d63d0bb69 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index 5436f8f83fb..65924b1746b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index a517cdf5562..24ae597d43a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -1,16 +1,21 @@ -// Copyright 2021 Google LLC +// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + 'use strict'; diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json new file mode 100644 index 00000000000..a36b0694cf8 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -0,0 +1,247 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta1", + "version": "v1beta1" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async", + "title": "BigQueryStorage createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. 
When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 24 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_storage.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 83, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "table_reference", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReference" + }, + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "table_modifiers", + "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + }, + { + "name": "read_options", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions" + }, + { + "name": "format", + "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat" + }, + { + "name": "sharding_strategy", + "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async", + "title": "BigQueryStorage readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the table in the format prescribed by the read session. 
Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.", + "canonical": true, + "file": "big_query_storage.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_position", + "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async", + "title": "BigQueryStorage batchCreateReadSessionStreams Sample", + "origin": "API_DEFINITION", + "description": " Creates additional streams for a ReadSession. 
This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.", + "canonical": true, + "file": "big_query_storage.batch_create_read_session_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "async": true, + "parameters": [ + { + "name": "session", + "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async", + "title": "BigQueryStorage finalizeStream Sample", + "origin": "API_DEFINITION", + "description": " Triggers the graceful termination of a single stream in a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. 
This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", + "canonical": true, + "file": "big_query_storage.finalize_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 50, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "async": true, + "parameters": [ + { + "name": "stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async", + "title": "BigQueryStorage splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. 
This method is guaranteed to be idempotent.", + "canonical": true, + "file": "big_query_storage.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 60, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "original_stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + }, + { + "name": "fraction", + "type": "TYPE_FLOAT" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + } + ] +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index f133143b2fe..a0246aef5d2 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -18,8 +18,15 @@ /* global window */ import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; - +import { + Callback, + CallOptions, + Descriptors, + ClientOptions, + GoogleError, +} from 'google-gax'; + +import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** @@ -241,6 +248,16 @@ export class BigQueryReadClient { stub => (...args: Array<{}>) => { if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 
'error', + new GoogleError('The client has already been closed.') + ); + }); + return stream; + } return Promise.reject('The client has already been closed.'); } const func = stub[methodName]; @@ -297,7 +314,6 @@ export class BigQueryReadClient { static get scopes() { return [ 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/bigquery.readonly', 'https://www.googleapis.com/auth/cloud-platform', ]; } @@ -885,9 +901,8 @@ export class BigQueryReadClient { * @returns {Promise} A promise that resolves when the client is closed. */ close(): Promise { - this.initialize(); - if (!this._terminated) { - return this.bigQueryReadStub!.then(stub => { + if (this.bigQueryReadStub && !this._terminated) { + return this.bigQueryReadStub.then(stub => { this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index b775bf6fe29..1cb67db7dac 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -18,8 +18,15 @@ /* global window */ import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; +import { + Callback, + CallOptions, + Descriptors, + ClientOptions, + GoogleError, +} from 'google-gax'; +import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** @@ -247,6 +254,16 @@ export class BigQueryWriteClient { stub => (...args: Array<{}>) => { if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new GoogleError('The client has already been closed.') + ); + }); + return stream; + } return Promise.reject('The client has already been closed.'); } const func = stub[methodName]; @@ -873,6 +890,13 @@ export class 
BigQueryWriteClient { * finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly * committed via the `BatchCommitWriteStreams` rpc. * + * Note: For users coding against the gRPC api directly, it may be + * necessary to supply the x-goog-request-params system parameter + * with `write_stream=`. + * + * More information about system parameters: + * https://cloud.google.com/apis/docs/system-parameters + * * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} @@ -887,7 +911,7 @@ export class BigQueryWriteClient { */ appendRows(options?: CallOptions): gax.CancellableStream { this.initialize(); - return this.innerApiCalls.appendRows(options); + return this.innerApiCalls.appendRows(null, options); } // -------------------- @@ -1167,9 +1191,8 @@ export class BigQueryWriteClient { * @returns {Promise} A promise that resolves when the client is closed. 
*/ close(): Promise { - this.initialize(); - if (!this._terminated) { - return this.bigQueryWriteStub!.then(stub => { + if (this.bigQueryWriteStub && !this._terminated) { + return this.bigQueryWriteStub.then(stub => { this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 72b13b1ef43..342ef1a6e9c 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -18,8 +18,15 @@ /* global window */ import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions} from 'google-gax'; - +import { + Callback, + CallOptions, + Descriptors, + ClientOptions, + GoogleError, +} from 'google-gax'; + +import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** @@ -238,6 +245,16 @@ export class BigQueryStorageClient { stub => (...args: Array<{}>) => { if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new GoogleError('The client has already been closed.') + ); + }); + return stream; + } return Promise.reject('The client has already been closed.'); } const func = stub[methodName]; @@ -294,7 +311,6 @@ export class BigQueryStorageClient { static get scopes() { return [ 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/bigquery.readonly', 'https://www.googleapis.com/auth/cloud-platform', ]; } @@ -962,9 +978,8 @@ export class BigQueryStorageClient { * @returns {Promise} A promise that resolves when the client is closed. 
*/ close(): Promise { - this.initialize(); - if (!this._terminated) { - return this.bigQueryStorageStub!.then(stub => { + if (this.bigQueryStorageStub && !this._terminated) { + return this.bigQueryStorageStub.then(stub => { this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index a5c551d3198..793ecd88c3e 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -111,12 +111,27 @@ describe('v1.BigQueryReadClient', () => { assert(client.bigQueryReadStub); }); - it('has close method', () => { + it('has close method for the initialized client', done => { const client = new bigqueryreadModule.v1.BigQueryReadClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.close(); + client.initialize(); + assert(client.bigQueryReadStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + client.close().then(() => { + done(); + }); }); it('has getProjectId method', async () => { @@ -265,6 +280,22 @@ describe('v1.BigQueryReadClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes createReadSession with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession = {}; + request.readSession.table = ''; + const expectedError = new 
Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createReadSession(request), expectedError); + }); }); describe('splitReadStream', () => { @@ -376,6 +407,21 @@ describe('v1.BigQueryReadClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes splitReadStream with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.splitReadStream(request), expectedError); + }); }); describe('readRows', () => { @@ -468,6 +514,35 @@ describe('v1.BigQueryReadClient', () => { .calledWith(request, expectedOptions) ); }); + + it('invokes readRows with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + request.readStream = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + }); }); describe('Path templates', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts 
b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 3365f629aee..005e54894e1 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -113,12 +113,27 @@ describe('v1beta1.BigQueryStorageClient', () => { assert(client.bigQueryStorageStub); }); - it('has close method', () => { + it('has close method for the initialized client', done => { const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.close(); + client.initialize(); + assert(client.bigQueryStorageStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + client.close().then(() => { + done(); + }); }); it('has getProjectId method', async () => { @@ -276,6 +291,24 @@ describe('v1beta1.BigQueryStorageClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes createReadSession with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference = {}; + request.tableReference.projectId = ''; + request.tableReference = {}; + request.tableReference.datasetId = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createReadSession(request), expectedError); + }); }); 
describe('batchCreateReadSessionStreams', () => { @@ -394,6 +427,25 @@ describe('v1beta1.BigQueryStorageClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes batchCreateReadSessionStreams with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session = {}; + request.session.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchCreateReadSessionStreams(request), + expectedError + ); + }); }); describe('finalizeStream', () => { @@ -508,6 +560,22 @@ describe('v1beta1.BigQueryStorageClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes finalizeStream with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream = {}; + request.stream.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.finalizeStream(request), expectedError); + }); }); describe('splitReadStream', () => { @@ -622,6 +690,22 @@ describe('v1beta1.BigQueryStorageClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes splitReadStream with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream = {}; + request.originalStream.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.splitReadStream(request), expectedError); + }); }); describe('readRows', () => { @@ -718,6 +802,37 @@ describe('v1beta1.BigQueryStorageClient', () => { .calledWith(request, expectedOptions) ); }); + + it('invokes readRows with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition = {}; + request.readPosition.stream = {}; + request.readPosition.stream.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + }); }); describe('Path templates', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index 608b649713f..44fdb98de72 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -104,12 +104,27 @@ describe('v1.BigQueryWriteClient', () => { assert(client.bigQueryWriteStub); }); - it('has close method', () => { + it('has close method for the initialized client', done => { 
const client = new bigquerywriteModule.v1.BigQueryWriteClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.close(); + client.initialize(); + assert(client.bigQueryWriteStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryWriteStub, undefined); + client.close().then(() => { + done(); + }); }); it('has getProjectId method', async () => { @@ -255,6 +270,21 @@ describe('v1.BigQueryWriteClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes createWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createWriteStream(request), expectedError); + }); }); describe('getWriteStream', () => { @@ -366,6 +396,21 @@ describe('v1.BigQueryWriteClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes getWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await 
assert.rejects(client.getWriteStream(request), expectedError); + }); }); describe('finalizeWriteStream', () => { @@ -478,6 +523,21 @@ describe('v1.BigQueryWriteClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes finalizeWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.finalizeWriteStream(request), expectedError); + }); }); describe('batchCommitWriteStreams', () => { @@ -593,6 +653,24 @@ describe('v1.BigQueryWriteClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes batchCommitWriteStreams with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchCommitWriteStreams(request), + expectedError + ); + }); }); describe('flushRows', () => { @@ -701,6 +779,21 @@ describe('v1.BigQueryWriteClient', () => { .calledWith(request, expectedOptions, undefined) ); }); + + it('invokes flushRows with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + request.writeStream = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.flushRows(request), expectedError); + }); }); describe('appendRows', () => { @@ -738,7 +831,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.innerApiCalls.appendRows as SinonStub) .getCall(0) - .calledWithExactly(undefined) + .calledWith(null) ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) @@ -756,8 +849,6 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() ); - request.writeStream = ''; - const expectedHeaderRequestParams = 'write_stream='; const expectedError = new Error('expected'); client.innerApiCalls.appendRows = stubBidiStreamingCall( undefined, @@ -783,7 +874,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.innerApiCalls.appendRows as SinonStub) .getCall(0) - .calledWithExactly(undefined) + .calledWith(null) ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) From 1aa6f9c63ba793ba2491304b6b0eb0595b74a522 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 02:34:21 +0000 Subject: [PATCH 173/333] build(node): update client library version in samples metadata (#1356) (#262) * build(node): add feat in node post-processor to add client library version number in snippet metadata Co-authored-by: Benjamin E. 
Coe Source-Link: https://github.com/googleapis/synthtool/commit/d337b88dd1494365183718a2de0b7b4056b6fdfe Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:d106724ad2a96daa1b8d88de101ba50bdb30b8df62ffa0aa2b451d93b4556641 --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/sync-repo-settings.yaml | 17 + .../bigquery-storage/.kokoro/common.cfg | 2 +- .../bigquery-storage/.kokoro/release/docs.cfg | 2 +- .../bigquery-storage/.kokoro/samples-test.sh | 2 +- .../bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- ...data.google.cloud.bigquery.storage.v1.json | 792 +++++++++--------- ...google.cloud.bigquery.storage.v1beta1.json | 472 +++++------ 9 files changed, 656 insertions(+), 639 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/sync-repo-settings.yaml diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index b4c08f9a4a2..9017db80d17 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:f74e740638e66be7ced1540626217dbb72980eb73885b2339a70592f38c9ff2c -# created: 2022-04-06T18:36:33.987617127Z + digest: sha256:d106724ad2a96daa1b8d88de101ba50bdb30b8df62ffa0aa2b451d93b4556641 +# created: 2022-04-20T16:59:29.058398639Z diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml new file mode 100644 index 00000000000..1b36268333a --- /dev/null +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -0,0 +1,17 @@ +branchProtectionRules: + - pattern: main + isAdminEnforced: true + requiredApprovingReviewCount: 1 + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: false + requiredStatusCheckContexts: + - "ci/kokoro: Samples test" + - "ci/kokoro: System test" + - docs + - lint + - test (10) + - test (12) + - test (14) + - cla/google + - windows + - OwlBot Post Processor diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg index c38e1a2c9b4..7fc0cdeac69 100644 --- a/handwritten/bigquery-storage/.kokoro/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg index 21d0eb33836..17861c90782 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" } # Download trampoline resources. diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index f249d3e4a2e..fbc058a4ec4 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -56,7 +56,7 @@ fi # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=10 +COVERAGE_NODE=12 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 0a840452084..87fa0653d76 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -49,7 +49,7 @@ npm run system-test # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=10 +COVERAGE_NODE=12 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index af1ce7e33ca..a5c7ac04cd3 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -39,7 +39,7 @@ npm test # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=10 +COVERAGE_NODE=12 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index a847f73832d..93aaa2255ba 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,415 +1,415 @@ { - "clientLibrary": { - "name": "nodejs-storage", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1", - "version": "v1" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", - "title": "BigQueryRead createReadSession Sample", - "origin": "API_DEFINITION", - "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. 
Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", - "canonical": true, - "file": "big_query_read.create_read_session.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 66, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateReadSession", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_session", - "type": ".google.cloud.bigquery.storage.v1.ReadSession" - }, - { - "name": "max_stream_count", - "type": "TYPE_INT32" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.ReadSession", - "client": { - "shortName": "BigQueryReadClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" - }, - "method": { - "shortName": "CreateReadSession", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "service": { - "shortName": "BigQueryRead", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" - } - } - } + "clientLibrary": { + "name": "nodejs-storage", + "version": "2.8.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1", + "version": "v1" + } + ] }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", - "title": "BigQueryRead readRows Sample", - "origin": "API_DEFINITION", - "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. 
Each request also returns a set of stream statistics reflecting the current state of the stream.", - "canonical": true, - "file": "big_query_read.read_rows.js", - "language": "JAVASCRIPT", - "segments": [ + "snippets": [ { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ReadRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "async": true, - "parameters": [ - { - "name": "read_stream", - "type": "TYPE_STRING" - }, - { - "name": "offset", - "type": "TYPE_INT64" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse", - "client": { - "shortName": "BigQueryReadClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", + "title": "BigQueryRead createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. 
Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_read.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 66, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_session", + "type": ".google.cloud.bigquery.storage.v1.ReadSession" + }, + { + "name": "max_stream_count", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadSession", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } }, - "method": { - "shortName": "ReadRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "service": { - "shortName": "BigQueryRead", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", - "title": "BigQueryRead splitReadStream Sample", - "origin": "API_DEFINITION", - "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. 
Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.", - "canonical": true, - "file": "big_query_read.split_read_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 60, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "SplitReadStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "fraction", - "type": "TYPE_DOUBLE" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse", - "client": { - "shortName": "BigQueryReadClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", + "title": "BigQueryRead readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. 
Each request also returns a set of stream statistics reflecting the current state of the stream.", + "canonical": true, + "file": "big_query_read.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": "TYPE_INT64" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } }, - "method": { - "shortName": "SplitReadStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "service": { - "shortName": "BigQueryRead", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", - "title": "BigQueryRead createWriteStream Sample", - "origin": "API_DEFINITION", - "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. 
Data written to this stream is considered committed as soon as an acknowledgement is received.", - "canonical": true, - "file": "big_query_write.create_write_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "write_stream", - "type": ".google.cloud.bigquery.storage.v1.WriteStream" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", - "client": { - "shortName": "BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", + "title": "BigQueryRead splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. 
Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.", + "canonical": true, + "file": "big_query_read.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 60, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "fraction", + "type": "TYPE_DOUBLE" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } }, - "method": { - "shortName": "CreateWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", - "title": "BigQueryRead appendRows Sample", - "origin": "API_DEFINITION", - "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. 
If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc. Note: For users coding against the gRPC api directly, it may be necessary to supply the x-goog-request-params system parameter with `write_stream=`. 
More information about system parameters: https://cloud.google.com/apis/docs/system-parameters", - "canonical": true, - "file": "big_query_write.append_rows.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 77, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "AppendRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "async": true, - "parameters": [ - { - "name": "write_stream", - "type": "TYPE_STRING" - }, - { - "name": "offset", - "type": ".google.protobuf.Int64Value" - }, - { - "name": "proto_rows", - "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" - }, - { - "name": "trace_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse", - "client": { - "shortName": "BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", + "title": "BigQueryRead createWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. 
Data written to this stream is considered committed as soon as an acknowledgement is received.", + "canonical": true, + "file": "big_query_write.create_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_stream", + "type": ".google.cloud.bigquery.storage.v1.WriteStream" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } }, - "method": { - "shortName": "AppendRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", - "title": "BigQueryRead getWriteStream Sample", - "origin": "API_DEFINITION", - "description": " Gets information about a write stream.", - "canonical": true, - "file": "big_query_write.get_write_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", - "client": { - "shortName": 
"BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", + "title": "BigQueryRead appendRows Sample", + "origin": "API_DEFINITION", + "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc. Note: For users coding against the gRPC api directly, it may be necessary to supply the x-goog-request-params system parameter with `write_stream=`. 
More information about system parameters: https://cloud.google.com/apis/docs/system-parameters", + "canonical": true, + "file": "big_query_write.append_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 77, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + }, + { + "name": "proto_rows", + "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } }, - "method": { - "shortName": "GetWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", - "title": "BigQueryRead finalizeWriteStream Sample", - "origin": "API_DEFINITION", - "description": " Finalize a write stream so that no new data can be appended to the stream. 
Finalize is not supported on the '_default' stream.", - "canonical": true, - "file": "big_query_write.finalize_write_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "FinalizeWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse", - "client": { - "shortName": "BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", + "title": "BigQueryRead getWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Gets information about a write stream.", + "canonical": true, + "file": "big_query_write.get_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } }, - "method": { - "shortName": "FinalizeWriteStream", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } - } - } - }, - { - 
"regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", - "title": "BigQueryRead batchCommitWriteStreams Sample", - "origin": "API_DEFINITION", - "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. Once a stream is committed, data in the stream becomes available for read operations.", - "canonical": true, - "file": "big_query_write.batch_commit_write_streams.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "BatchCommitWriteStreams", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "write_streams", - "type": "TYPE_STRING[]" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse", - "client": { - "shortName": "BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", + "title": "BigQueryRead finalizeWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Finalize a write stream so that no new data can be appended to the stream. 
Finalize is not supported on the '_default' stream.", + "canonical": true, + "file": "big_query_write.finalize_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } }, - "method": { - "shortName": "BatchCommitWriteStreams", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", - "title": "BigQueryRead flushRows Sample", - "origin": "API_DEFINITION", - "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. 
Flush is not supported on the _default stream, since it is not BUFFERED.", - "canonical": true, - "file": "big_query_write.flush_rows.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "FlushRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "async": true, - "parameters": [ - { - "name": "write_stream", - "type": "TYPE_STRING" - }, - { - "name": "offset", - "type": ".google.protobuf.Int64Value" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse", - "client": { - "shortName": "BigQueryWriteClient", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", + "title": "BigQueryRead batchCommitWriteStreams Sample", + "origin": "API_DEFINITION", + "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. 
Once a stream is committed, data in the stream becomes available for read operations.", + "canonical": true, + "file": "big_query_write.batch_commit_write_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_streams", + "type": "TYPE_STRING[]" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } }, - "method": { - "shortName": "FlushRows", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "service": { - "shortName": "BigQueryWrite", - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" - } + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", + "title": "BigQueryRead flushRows Sample", + "origin": "API_DEFINITION", + "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. 
Flush is not supported on the _default stream, since it is not BUFFERED.", + "canonical": true, + "file": "big_query_write.flush_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } } - } - } - ] -} + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index a36b0694cf8..4bed0e60cc4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,247 +1,247 @@ { - "clientLibrary": { - "name": "nodejs-storage", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1beta1", - "version": "v1beta1" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async", - "title": "BigQueryStorage createReadSession Sample", - "origin": "API_DEFINITION", - "description": " Creates a new read 
session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 24 hours after they are created and do not require manual clean-up by the caller.", - "canonical": true, - "file": "big_query_storage.create_read_session.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 83, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateReadSession", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", - "async": true, - "parameters": [ - { - "name": "table_reference", - "type": ".google.cloud.bigquery.storage.v1beta1.TableReference" - }, - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "table_modifiers", - "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers" - }, - { - "name": "requested_streams", - "type": "TYPE_INT32" - }, - { - "name": "read_options", - "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions" - }, - { - "name": "format", - "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat" - }, - { - "name": "sharding_strategy", - "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession", - "client": { - "shortName": "BigQueryStorageClient", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" - }, - "method": { - "shortName": "CreateReadSession", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", - "service": { - "shortName": "BigQueryStorage", - "fullName": 
"google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - } - } - } + "clientLibrary": { + "name": "nodejs-storage", + "version": "2.8.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta1", + "version": "v1beta1" + } + ] }, - { - "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async", - "title": "BigQueryStorage readRows Sample", - "origin": "API_DEFINITION", - "description": " Reads rows from the table in the format prescribed by the read session. Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.", - "canonical": true, - "file": "big_query_storage.read_rows.js", - "language": "JAVASCRIPT", - "segments": [ + "snippets": [ { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ReadRows", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", - "async": true, - "parameters": [ - { - "name": "read_position", - "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse", - "client": { - "shortName": "BigQueryStorageClient", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async", + "title": "BigQueryStorage createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. 
The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 24 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_storage.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 83, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "table_reference", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReference" + }, + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "table_modifiers", + "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + }, + { + "name": "read_options", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions" + }, + { + "name": "format", + "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat" + }, + { + "name": "sharding_strategy", + "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } }, - "method": { - "shortName": "ReadRows", - "fullName": 
"google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", - "service": { - "shortName": "BigQueryStorage", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async", - "title": "BigQueryStorage batchCreateReadSessionStreams Sample", - "origin": "API_DEFINITION", - "description": " Creates additional streams for a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.", - "canonical": true, - "file": "big_query_storage.batch_create_read_session_streams.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "BatchCreateReadSessionStreams", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", - "async": true, - "parameters": [ - { - "name": "session", - "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession" - }, - { - "name": "requested_streams", - "type": "TYPE_INT32" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse", - "client": { - "shortName": "BigQueryStorageClient", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async", + "title": "BigQueryStorage readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the table in the format prescribed by the read session. Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. 
This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.", + "canonical": true, + "file": "big_query_storage.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_position", + "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } }, - "method": { - "shortName": "BatchCreateReadSessionStreams", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", - "service": { - "shortName": "BigQueryStorage", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async", - "title": "BigQueryStorage finalizeStream Sample", - "origin": "API_DEFINITION", - "description": " Triggers the graceful termination of a single stream in a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. 
Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", - "canonical": true, - "file": "big_query_storage.finalize_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 50, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "FinalizeStream", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", - "async": true, - "parameters": [ - { - "name": "stream", - "type": ".google.cloud.bigquery.storage.v1beta1.Stream" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "BigQueryStorageClient", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async", + "title": "BigQueryStorage batchCreateReadSessionStreams Sample", + "origin": "API_DEFINITION", + "description": " Creates additional streams for a ReadSession. 
This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.", + "canonical": true, + "file": "big_query_storage.batch_create_read_session_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "async": true, + "parameters": [ + { + "name": "session", + "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } }, - "method": { - "shortName": "FinalizeStream", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", - "service": { - "shortName": "BigQueryStorage", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - } - } - } - }, - { - "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async", - "title": "BigQueryStorage splitReadStream Sample", - "origin": "API_DEFINITION", - "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. 
Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. This method is guaranteed to be idempotent.", - "canonical": true, - "file": "big_query_storage.split_read_stream.js", - "language": "JAVASCRIPT", - "segments": [ { - "start": 25, - "end": 60, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "SplitReadStream", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", - "async": true, - "parameters": [ - { - "name": "original_stream", - "type": ".google.cloud.bigquery.storage.v1beta1.Stream" - }, - { - "name": "fraction", - "type": "TYPE_FLOAT" - } - ], - "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse", - "client": { - "shortName": "BigQueryStorageClient", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async", + "title": "BigQueryStorage finalizeStream Sample", + "origin": "API_DEFINITION", + "description": " Triggers the graceful termination of a single stream in a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. 
This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", + "canonical": true, + "file": "big_query_storage.finalize_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 50, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "async": true, + "parameters": [ + { + "name": "stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } }, - "method": { - "shortName": "SplitReadStream", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", - "service": { - "shortName": "BigQueryStorage", - "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" - } + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async", + "title": "BigQueryStorage splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. 
Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. This method is guaranteed to be idempotent.", + "canonical": true, + "file": "big_query_storage.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 60, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "original_stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + }, + { + "name": "fraction", + "type": "TYPE_FLOAT" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } } - } - } - ] -} + ] +} \ No newline at end of file From c013344aaae430450de114938baec6eece7b7a67 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 May 2022 17:35:00 -0400 Subject: [PATCH 174/333] chore(node): handle when package.json does not have a version (#270) --- .../bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- .../bigquery-storage/.github/auto-approve.yml | 15 +++------------ 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 9017db80d17..c612c534b4d 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ 
b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:d106724ad2a96daa1b8d88de101ba50bdb30b8df62ffa0aa2b451d93b4556641 -# created: 2022-04-20T16:59:29.058398639Z + digest: sha256:079b0f3bd8427671745ec03a5179575b4c86a4e776fb6041427e553719c65c2b +# created: 2022-05-10T15:57:49.166896548Z diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml index 49cf942280a..4cd91cc16ae 100644 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -1,12 +1,3 @@ -rules: -- author: "release-please[bot]" - title: "^chore: release" - changedFiles: - - "package\\.json$" - - "CHANGELOG\\.md$" - maxFiles: 3 -- author: "renovate-bot" - title: "^(fix|chore)\\(deps\\):" - changedFiles: - - "package\\.json$" - maxFiles: 2 +processes: + - "NodeDependency" + - "OwlBotTemplateChanges" From 2429047a94dd2f0bd9ac2c413cde81f685101395 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Mon, 16 May 2022 17:39:32 -0700 Subject: [PATCH 175/333] build!: update library to use Node 12 (#272) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat!: Update library to use Node 12 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../.github/sync-repo-settings.yaml | 2 +- .../.kokoro/continuous/node10/common.cfg | 34 ------------------- .../.kokoro/continuous/node10/docs.cfg | 4 --- .../.kokoro/continuous/node10/test.cfg | 9 ----- .../.kokoro/continuous/node8/common.cfg | 24 ------------- .../.kokoro/continuous/node8/test.cfg | 0 .../.kokoro/presubmit/node10/common.cfg | 34 ------------------- .../.kokoro/presubmit/node10/docs.cfg | 4 --- .../.kokoro/presubmit/node10/lint.cfg | 4 --- 
.../.kokoro/presubmit/node10/test.cfg | 0 .../.kokoro/presubmit/node8/common.cfg | 24 ------------- .../.kokoro/presubmit/node8/test.cfg | 0 handwritten/bigquery-storage/package.json | 10 +++--- 13 files changed, 6 insertions(+), 143 deletions(-) delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml index 1b36268333a..d1e8b5e6e1a 100644 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -9,9 +9,9 @@ branchProtectionRules: - "ci/kokoro: System test" - docs - lint - - test (10) - test (12) - test (14) + - test (16) - cla/google - windows - OwlBot Post Processor diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg deleted file mode 100644 index 71061cf193b..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg deleted file mode 100644 index 5972e5b337c..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/docs.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/docs.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg deleted file mode 100644 index 609c0cf0a27..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node10/test.cfg +++ /dev/null @@ -1,9 +0,0 @@ -# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg deleted file mode 100644 index d9c4fb600d5..00000000000 --- 
a/handwritten/bigquery-storage/.kokoro/continuous/node8/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node8/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg deleted file mode 100644 index 71061cf193b..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node10/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg deleted file mode 100644 index 5972e5b337c..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node10/docs.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/docs.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg deleted file mode 100644 index 0a5d546b96b..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node10/lint.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node10/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg deleted file mode 100644 index d9c4fb600d5..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node8/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node8/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2790c6440f6..658aa7f1e01 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,28 +27,28 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^2.24.1" + "google-gax": "^3.0.1" }, "devDependencies": { "@types/mocha": "^8.0.0", "@types/node": "^16.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", - "gts": "^3.0.0", + "gts": "^3.1.0", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", "jsdoc-region-tag": "^1.0.4", "linkinator": "^2.0.1", - "mocha": "^8.0.0", + "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^13.0.0", "ts-loader": "^9.0.0", - "typescript": "^3.8.3", + "typescript": "^4.6.4", "webpack": "^5.0.0", "webpack-cli": "^4.0.0" }, "engines": { - "node": ">=10" + "node": ">=12.0.0" } } From 7cfb6c730ce3bdd12d22571e4ae759663359f0f4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 31 May 2022 22:14:19 +0000 Subject: [PATCH 176/333] fix: fixes for dynamic routing and streaming descriptors (#274) - [ ] Regenerate this pull request now. Use gapic-generator-typescript v2.14.5. 
PiperOrigin-RevId: 450616838 Source-Link: https://github.com/googleapis/googleapis/commit/7a47b72791e0b84d78beca4c2b26bec42ce31572 Source-Link: https://github.com/googleapis/googleapis-gen/commit/42cc6331bae0b99f61b8e01ae15b05211716c4f9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDJjYzYzMzFiYWUwYjk5ZjYxYjhlMDFhZTE1YjA1MjExNzE2YzRmOSJ9 --- handwritten/bigquery-storage/src/v1/big_query_read_client.ts | 3 ++- handwritten/bigquery-storage/src/v1/big_query_write_client.ts | 3 ++- .../bigquery-storage/src/v1beta1/big_query_storage_client.ts | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index a0246aef5d2..525177ca372 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -185,7 +185,8 @@ export class BigQueryReadClient { // Provide descriptors for these. this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.SERVER_STREAMING + gax.StreamType.SERVER_STREAMING, + opts.fallback === 'rest' ), }; diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 1cb67db7dac..2ab96c5d6e9 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -188,7 +188,8 @@ export class BigQueryWriteClient { // Provide descriptors for these. 
this.descriptors.stream = { appendRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.BIDI_STREAMING + gax.StreamType.BIDI_STREAMING, + opts.fallback === 'rest' ), }; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 342ef1a6e9c..544d6d476ce 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -179,7 +179,8 @@ export class BigQueryStorageClient { // Provide descriptors for these. this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.SERVER_STREAMING + gax.StreamType.SERVER_STREAMING, + opts.fallback === 'rest' ), }; From b602bf4b5abe0747d4091112a99d713c49cb8375 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 7 Jun 2022 22:12:10 +0000 Subject: [PATCH 177/333] build(node): add new jsteam + enforce branches up-to-date (#1451) (#276) Source-Link: https://github.com/googleapis/synthtool/commit/cd785291d51d97003d1263056cd2b9de1849a0ab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:ddb19a6df6c1fa081bc99fb29658f306dd64668bc26f75d1353b28296f3a78e6 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- .../bigquery-storage/.github/sync-repo-settings.yaml | 9 ++++++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index c612c534b4d..f3ca5561cb5 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:079b0f3bd8427671745ec03a5179575b4c86a4e776fb6041427e553719c65c2b -# created: 2022-05-10T15:57:49.166896548Z + digest: sha256:ddb19a6df6c1fa081bc99fb29658f306dd64668bc26f75d1353b28296f3a78e6 +# created: 2022-06-07T21:18:30.024751809Z diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml index d1e8b5e6e1a..4a30a08e54c 100644 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -3,7 +3,7 @@ branchProtectionRules: isAdminEnforced: true requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true - requiresStrictStatusChecks: false + requiresStrictStatusChecks: true requiredStatusCheckContexts: - "ci/kokoro: Samples test" - "ci/kokoro: System test" @@ -15,3 +15,10 @@ branchProtectionRules: - cla/google - windows - OwlBot Post Processor +permissionRules: + - team: yoshi-admins + permission: admin + - team: jsteam-admins + permission: admin + - team: jsteam + permission: push From 8f6214e3516fed063365be67b9217727ef35b5cf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 9 Jun 2022 20:37:38 +0200 Subject: [PATCH 178/333] chore(deps): update dependency @types/mocha to v9 (#264) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 658aa7f1e01..64e698facc6 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -30,7 +30,7 @@ "google-gax": "^3.0.1" }, "devDependencies": { - "@types/mocha": "^8.0.0", + "@types/mocha": "^9.0.0", "@types/node": "^16.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", From 33d106922d55b363fa737da4784539d2ef8203b8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 10 Jun 
2022 17:08:14 +0200 Subject: [PATCH 179/333] chore(deps): update dependency jsdoc-region-tag to v2 (#278) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jsdoc-region-tag](https://togithub.com/googleapis/jsdoc-region-tag) | [`^1.0.4` -> `^2.0.0`](https://renovatebot.com/diffs/npm/jsdoc-region-tag/1.3.1/2.0.0) | [![age](https://badges.renovateapi.com/packages/npm/jsdoc-region-tag/2.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/jsdoc-region-tag/2.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/jsdoc-region-tag/2.0.0/compatibility-slim/1.3.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/jsdoc-region-tag/2.0.0/confidence-slim/1.3.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/jsdoc-region-tag ### [`v2.0.0`](https://togithub.com/googleapis/jsdoc-region-tag/blob/HEAD/CHANGELOG.md#​200-httpsgithubcomgoogleapisjsdoc-region-tagcomparev131v200-2022-05-20) [Compare Source](https://togithub.com/googleapis/jsdoc-region-tag/compare/v1.3.1...v2.0.0) ##### ⚠ BREAKING CHANGES - update library to use Node 12 ([#​107](https://togithub.com/googleapis/jsdoc-region-tag/issues/107)) ##### Build System - update library to use Node 12 ([#​107](https://togithub.com/googleapis/jsdoc-region-tag/issues/107)) ([5b51796](https://togithub.com/googleapis/jsdoc-region-tag/commit/5b51796771984cf8b978990025f14faa03c19923)) ##### [1.3.1](https://www.github.com/googleapis/jsdoc-region-tag/compare/v1.3.0...v1.3.1) (2021-08-11) ##### Bug Fixes - **build:** migrate to using main branch ([#​79](https://www.togithub.com/googleapis/jsdoc-region-tag/issues/79)) ([5050615](https://www.github.com/googleapis/jsdoc-region-tag/commit/50506150b7758592df5e389c6a5c3d82b3b20881))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 64e698facc6..6d23856a4ce 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -37,7 +37,7 @@ "gts": "^3.1.0", "jsdoc": "^3.6.3", "jsdoc-fresh": "^1.0.2", - "jsdoc-region-tag": "^1.0.4", + "jsdoc-region-tag": "^2.0.0", "linkinator": "^2.0.1", "mocha": "^9.2.2", "null-loader": "^4.0.0", From e42ec75e6bd49127a010fb35415959a71eacb125 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 10 Jun 2022 17:32:36 +0200 Subject: [PATCH 180/333] chore(deps): update dependency jsdoc-fresh to v2 (#277) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jsdoc-fresh](https://togithub.com/googleapis/jsdoc-fresh) | [`^1.0.2` -> `^2.0.0`](https://renovatebot.com/diffs/npm/jsdoc-fresh/1.1.1/2.0.0) | 
[![age](https://badges.renovateapi.com/packages/npm/jsdoc-fresh/2.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/jsdoc-fresh/2.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/jsdoc-fresh/2.0.0/compatibility-slim/1.1.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/jsdoc-fresh/2.0.0/confidence-slim/1.1.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/jsdoc-fresh ### [`v2.0.0`](https://togithub.com/googleapis/jsdoc-fresh/blob/HEAD/CHANGELOG.md#​200-httpsgithubcomgoogleapisjsdoc-freshcomparev111v200-2022-05-18) [Compare Source](https://togithub.com/googleapis/jsdoc-fresh/compare/v1.1.1...v2.0.0) ##### ⚠ BREAKING CHANGES - update library to use Node 12 ([#​108](https://togithub.com/googleapis/jsdoc-fresh/issues/108)) ##### Build System - update library to use Node 12 ([#​108](https://togithub.com/googleapis/jsdoc-fresh/issues/108)) ([e61c223](https://togithub.com/googleapis/jsdoc-fresh/commit/e61c2238db8900e339e5fe7fb8aea09642290182)) ##### [1.1.1](https://www.github.com/googleapis/jsdoc-fresh/compare/v1.1.0...v1.1.1) (2021-08-11) ##### Bug Fixes - **build:** migrate to using main branch ([#​83](https://www.togithub.com/googleapis/jsdoc-fresh/issues/83)) ([9474adb](https://www.github.com/googleapis/jsdoc-fresh/commit/9474adbf0d559d319ff207397ba2be6b557999ac))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6d23856a4ce..2d47cd2c2ad 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -36,7 +36,7 @@ "c8": "^7.1.0", "gts": "^3.1.0", "jsdoc": "^3.6.3", - "jsdoc-fresh": "^1.0.2", + "jsdoc-fresh": "^2.0.0", "jsdoc-region-tag": "^2.0.0", "linkinator": "^2.0.1", "mocha": "^9.2.2", From bd6d0dd93f716484d27ff03f9b5ab0e1c7b3f1ee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 20 Jun 2022 21:11:32 +0200 Subject: [PATCH 181/333] chore(deps): update dependency sinon to v14 (#269) Co-authored-by: Steffany Brown <30247553+steffnay@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2d47cd2c2ad..635dbfce4ff 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -42,7 +42,7 @@ "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^13.0.0", + "sinon": "^14.0.0", "ts-loader": "^9.0.0", "typescript": 
"^4.6.4", "webpack": "^5.0.0", From 8d573cfcc86addf57c1ec690a78886f7c0f560e8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Jun 2022 12:40:42 -0400 Subject: [PATCH 182/333] fix: Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time (#279) feat: add fields to eventually contain row level errors Committer: @gnanda PiperOrigin-RevId: 456324780 Source-Link: https://github.com/googleapis/googleapis/commit/f24b37a351260ddce8208edae50d637fa0b88d6b Source-Link: https://github.com/googleapis/googleapis-gen/commit/33f9d814082117116c4b68a6f5aac3f42bec35c2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzNmOWQ4MTQwODIxMTcxMTZjNGI2OGE2ZjVhYWMzZjQyYmVjMzVjMiJ9 feat: support regapic LRO PiperOrigin-RevId: 456946341 Source-Link: https://github.com/googleapis/googleapis/commit/88fd18d9d3b872b3d06a3d9392879f50b5bf3ce5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/accfa371f667439313335c64042b063c1c53102e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWNjZmEzNzFmNjY3NDM5MzEzMzM1YzY0MDQyYjA2M2MxYzUzMTAyZSJ9 See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md fix: Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time PiperOrigin-RevId: 457061436 Source-Link: https://github.com/googleapis/googleapis/commit/8ff130bc81fa1d175e410d14a300caa18d5ebf80 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2eb0faca717d9cf44b838b7db5e862451b8a86ef Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmViMGZhY2E3MTdkOWNmNDRiODM4YjdkYjVlODYyNDUxYjhhODZlZiJ9 See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/storage.proto | 38 ++- .../cloud/bigquery/storage/v1/stream.proto | 3 +- .../cloud/bigquery/storage/v1/table.proto | 4 +- 
.../bigquery-storage/protos/protos.d.ts | 117 +++++++ handwritten/bigquery-storage/protos/protos.js | 317 ++++++++++++++++++ .../bigquery-storage/protos/protos.json | 29 ++ .../v1/big_query_read.create_read_session.js | 10 +- ...data.google.cloud.bigquery.storage.v1.json | 2 +- .../src/v1/big_query_read_client.ts | 21 +- .../src/v1/big_query_write_client.ts | 11 +- .../src/v1/big_query_write_client_config.json | 20 +- .../src/v1beta1/big_query_storage_client.ts | 11 +- 12 files changed, 546 insertions(+), 37 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 67c6c8a0295..f3c974c6461 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -248,11 +248,13 @@ message CreateReadSessionRequest { // Max initial number of streams. If unset or zero, the server will // provide a value of streams so as to produce reasonable throughput. Must be // non-negative. The number of streams may be lower than the requested number, - // depending on the amount parallelism that is reasonable for the table. Error - // will be returned if the max count is greater than the current system - // max limit of 1,000. + // depending on the amount parallelism that is reasonable for the table. + // There is a default system max limit of 1,000. // - // Streams must be read starting from offset 0. + // This must be greater than or equal to preferred_min_stream_count. + // Typically, clients should either leave this unset to let the system to + // determine an upper bound OR set this a size for the maximum "units of work" + // it can gracefully handle. int32 max_stream_count = 3; } @@ -329,7 +331,7 @@ message ReadRowsResponse { // The schema for the read. 
If read_options.selected_fields is set, the // schema may be different from the table schema as it will only contain - // the selected fields. This schema is equivelant to the one returned by + // the selected fields. This schema is equivalent to the one returned by // CreateSession. This field is only populated in the first ReadRowsResponse // RPC. oneof schema { @@ -488,6 +490,11 @@ message AppendRowsResponse { // use it to input new type of message. It will be empty when no schema // updates have occurred. TableSchema updated_schema = 3; + + // If a request failed due to corrupted rows, no rows in the batch will be + // appended. The API will return row level error info, so that the caller can + // remove the bad rows and retry the request. + repeated RowError row_errors = 4; } // Request message for `GetWriteStreamRequest`. @@ -622,3 +629,24 @@ message StorageError { // Message that describes the error. string error_message = 3; } + +// The message that presents row level error info in a request. +message RowError { + // Error code for `RowError`. + enum RowErrorCode { + // Default error. + ROW_ERROR_CODE_UNSPECIFIED = 0; + + // One or more fields in the row has errors. + FIELDS_ERROR = 1; + } + + // Index of the malformed row in the request. + int64 index = 1; + + // Structured error reason for a row error. + RowErrorCode code = 2; + + // Description of the issue encountered when processing the row. + string message = 3; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index bd1fa2ce98a..fd1e25b65fd 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -32,6 +32,7 @@ option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; // Data format for input or output data. 
enum DataFormat { + // Data format is unspecified. DATA_FORMAT_UNSPECIFIED = 0; // Avro is a standard open source row based file format. @@ -91,7 +92,7 @@ message ReadSession { // automatically assigned and currently cannot be specified or updated. google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Immutable. Data format of the output data. + // Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported. DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; // The schema for the read. If read_options.selected_fields is set, the diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 545f6292712..fa4f840c580 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -25,7 +25,9 @@ option java_outer_classname = "TableProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; -// Schema of a table. +// Schema of a table. This schema is a subset of +// google.cloud.bigquery.v2.TableSchema containing information necessary to +// generate valid message to write to BigQuery. message TableSchema { // Describes the fields in a table. 
repeated TableFieldSchema fields = 1; diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index a82f40762cd..b111ca1f564 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -2036,6 +2036,9 @@ export namespace google { /** AppendRowsResponse updatedSchema */ updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** AppendRowsResponse rowErrors */ + rowErrors?: (google.cloud.bigquery.storage.v1.IRowError[]|null); } /** Represents an AppendRowsResponse. */ @@ -2056,6 +2059,9 @@ export namespace google { /** AppendRowsResponse updatedSchema. */ public updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + /** AppendRowsResponse rowErrors. */ + public rowErrors: google.cloud.bigquery.storage.v1.IRowError[]; + /** AppendRowsResponse response. */ public response?: ("appendResult"|"error"); @@ -2990,6 +2996,117 @@ export namespace google { } } + /** Properties of a RowError. */ + interface IRowError { + + /** RowError index */ + index?: (number|Long|string|null); + + /** RowError code */ + code?: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null); + + /** RowError message */ + message?: (string|null); + } + + /** Represents a RowError. */ + class RowError implements IRowError { + + /** + * Constructs a new RowError. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IRowError); + + /** RowError index. */ + public index: (number|Long|string); + + /** RowError code. */ + public code: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode); + + /** RowError message. */ + public message: string; + + /** + * Creates a new RowError instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns RowError instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IRowError): google.cloud.bigquery.storage.v1.RowError; + + /** + * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. + * @param message RowError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. + * @param message RowError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a RowError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.RowError; + + /** + * Decodes a RowError message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.RowError; + + /** + * Verifies a RowError message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a RowError message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns RowError + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.RowError; + + /** + * Creates a plain object from a RowError message. Also converts values to other types if specified. + * @param message RowError + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.RowError, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this RowError to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + namespace RowError { + + /** RowErrorCode enum. */ + enum RowErrorCode { + ROW_ERROR_CODE_UNSPECIFIED = 0, + FIELDS_ERROR = 1 + } + } + /** DataFormat enum. 
*/ enum DataFormat { DATA_FORMAT_UNSPECIFIED = 0, diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 1d0989c2671..ea81e6448f4 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -4532,6 +4532,7 @@ * @property {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null} [appendResult] AppendRowsResponse appendResult * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema + * @property {Array.|null} [rowErrors] AppendRowsResponse rowErrors */ /** @@ -4543,6 +4544,7 @@ * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set */ function AppendRowsResponse(properties) { + this.rowErrors = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4573,6 +4575,14 @@ */ AppendRowsResponse.prototype.updatedSchema = null; + /** + * AppendRowsResponse rowErrors. 
+ * @member {Array.} rowErrors + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.rowErrors = $util.emptyArray; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -4617,6 +4627,9 @@ $root.google.rpc.Status.encode(message.error, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.updatedSchema != null && Object.hasOwnProperty.call(message, "updatedSchema")) $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.updatedSchema, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.rowErrors != null && message.rowErrors.length) + for (var i = 0; i < message.rowErrors.length; ++i) + $root.google.cloud.bigquery.storage.v1.RowError.encode(message.rowErrors[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -4660,6 +4673,11 @@ case 3: message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); break; + case 4: + if (!(message.rowErrors && message.rowErrors.length)) + message.rowErrors = []; + message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); + break; default: reader.skipType(tag & 7); break; @@ -4719,6 +4737,15 @@ if (error) return "updatedSchema." + error; } + if (message.rowErrors != null && message.hasOwnProperty("rowErrors")) { + if (!Array.isArray(message.rowErrors)) + return "rowErrors: array expected"; + for (var i = 0; i < message.rowErrors.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.RowError.verify(message.rowErrors[i]); + if (error) + return "rowErrors." 
+ error; + } + } return null; }; @@ -4749,6 +4776,16 @@ throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.updatedSchema: object expected"); message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.updatedSchema); } + if (object.rowErrors) { + if (!Array.isArray(object.rowErrors)) + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: array expected"); + message.rowErrors = []; + for (var i = 0; i < object.rowErrors.length; ++i) { + if (typeof object.rowErrors[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: object expected"); + message.rowErrors[i] = $root.google.cloud.bigquery.storage.v1.RowError.fromObject(object.rowErrors[i]); + } + } return message; }; @@ -4765,6 +4802,8 @@ if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) + object.rowErrors = []; if (options.defaults) object.updatedSchema = null; if (message.appendResult != null && message.hasOwnProperty("appendResult")) { @@ -4779,6 +4818,11 @@ } if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) object.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.updatedSchema, options); + if (message.rowErrors && message.rowErrors.length) { + object.rowErrors = []; + for (var j = 0; j < message.rowErrors.length; ++j) + object.rowErrors[j] = $root.google.cloud.bigquery.storage.v1.RowError.toObject(message.rowErrors[j], options); + } return object; }; @@ -6756,6 +6800,279 @@ return StorageError; })(); + v1.RowError = (function() { + + /** + * Properties of a RowError. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IRowError + * @property {number|Long|null} [index] RowError index + * @property {google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null} [code] RowError code + * @property {string|null} [message] RowError message + */ + + /** + * Constructs a new RowError. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a RowError. + * @implements IRowError + * @constructor + * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set + */ + function RowError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * RowError index. + * @member {number|Long} index + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.index = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * RowError code. + * @member {google.cloud.bigquery.storage.v1.RowError.RowErrorCode} code + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.code = 0; + + /** + * RowError message. + * @member {string} message + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.message = ""; + + /** + * Creates a new RowError instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError instance + */ + RowError.create = function create(properties) { + return new RowError(properties); + }; + + /** + * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RowError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.index != null && Object.hasOwnProperty.call(message, "index")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.index); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.code); + if (message.message != null && Object.hasOwnProperty.call(message, "message")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.message); + return writer; + }; + + /** + * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RowError.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a RowError message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RowError.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.RowError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.index = reader.int64(); + break; + case 2: + message.code = reader.int32(); + break; + case 3: + message.message = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a RowError message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RowError.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a RowError message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + RowError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.index != null && message.hasOwnProperty("index")) + if (!$util.isInteger(message.index) && !(message.index && $util.isInteger(message.index.low) && $util.isInteger(message.index.high))) + return "index: integer|Long expected"; + if (message.code != null && message.hasOwnProperty("code")) + switch (message.code) { + default: + return "code: enum value expected"; + case 0: + case 1: + break; + } + if (message.message != null && message.hasOwnProperty("message")) + if (!$util.isString(message.message)) + return "message: string expected"; + return null; + }; + + /** + * Creates a RowError message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + */ + RowError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.RowError) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.RowError(); + if (object.index != null) + if ($util.Long) + (message.index = $util.Long.fromValue(object.index)).unsigned = false; + else if (typeof object.index === "string") + message.index = parseInt(object.index, 10); + else if (typeof object.index === "number") + message.index = object.index; + else if (typeof object.index === "object") + message.index = new $util.LongBits(object.index.low >>> 0, object.index.high >>> 0).toNumber(); + switch (object.code) { + case "ROW_ERROR_CODE_UNSPECIFIED": + case 0: + message.code = 0; + break; + case "FIELDS_ERROR": + case 1: + message.code = 1; + break; + } + if (object.message != null) + message.message = String(object.message); + return message; + }; + + /** + * Creates a plain object from a RowError message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.RowError} message RowError + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + RowError.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.index = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.index = options.longs === String ? "0" : 0; + object.code = options.enums === String ? 
"ROW_ERROR_CODE_UNSPECIFIED" : 0; + object.message = ""; + } + if (message.index != null && message.hasOwnProperty("index")) + if (typeof message.index === "number") + object.index = options.longs === String ? String(message.index) : message.index; + else + object.index = options.longs === String ? $util.Long.prototype.toString.call(message.index) : options.longs === Number ? new $util.LongBits(message.index.low >>> 0, message.index.high >>> 0).toNumber() : message.index; + if (message.code != null && message.hasOwnProperty("code")) + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] : message.code; + if (message.message != null && message.hasOwnProperty("message")) + object.message = message.message; + return object; + }; + + /** + * Converts this RowError to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + * @returns {Object.} JSON object + */ + RowError.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * RowErrorCode enum. + * @name google.cloud.bigquery.storage.v1.RowError.RowErrorCode + * @enum {number} + * @property {number} ROW_ERROR_CODE_UNSPECIFIED=0 ROW_ERROR_CODE_UNSPECIFIED value + * @property {number} FIELDS_ERROR=1 FIELDS_ERROR value + */ + RowError.RowErrorCode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ROW_ERROR_CODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "FIELDS_ERROR"] = 1; + return values; + })(); + + return RowError; + })(); + /** * DataFormat enum. 
* @name google.cloud.bigquery.storage.v1.DataFormat diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index a892cd68782..d796f2c8745 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -523,6 +523,11 @@ "updatedSchema": { "type": "TableSchema", "id": 3 + }, + "rowErrors": { + "rule": "repeated", + "type": "RowError", + "id": 4 } }, "nested": { @@ -657,6 +662,30 @@ } } }, + "RowError": { + "fields": { + "index": { + "type": "int64", + "id": 1 + }, + "code": { + "type": "RowErrorCode", + "id": 2 + }, + "message": { + "type": "string", + "id": 3 + } + }, + "nested": { + "RowErrorCode": { + "values": { + "ROW_ERROR_CODE_UNSPECIFIED": 0, + "FIELDS_ERROR": 1 + } + } + } + }, "DataFormat": { "values": { "DATA_FORMAT_UNSPECIFIED": 0, diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index f4d2f95025e..c5cc160a217 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -38,10 +38,12 @@ function main(parent, readSession) { * Max initial number of streams. If unset or zero, the server will * provide a value of streams so as to produce reasonable throughput. Must be * non-negative. The number of streams may be lower than the requested number, - * depending on the amount parallelism that is reasonable for the table. Error - * will be returned if the max count is greater than the current system - * max limit of 1,000. - * Streams must be read starting from offset 0. + * depending on the amount parallelism that is reasonable for the table. + * There is a default system max limit of 1,000. + * This must be greater than or equal to preferred_min_stream_count. 
+ * Typically, clients should either leave this unset to let the system to + * determine an upper bound OR set this a size for the maximum "units of work" + * it can gracefully handle. */ // const maxStreamCount = 1234 diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 93aaa2255ba..90db40853ef 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 66, + "end": 68, "type": "FULL" } ], diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 525177ca372..553fe889849 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -70,7 +70,7 @@ export class BigQueryReadClient { * * @param {object} [options] - The configuration object. * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] @@ -93,11 +93,10 @@ export class BigQueryReadClient { * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. 
- * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. */ constructor(opts?: ClientOptions) { // Ensure that options include all the required fields. @@ -370,11 +369,13 @@ export class BigQueryReadClient { * Max initial number of streams. If unset or zero, the server will * provide a value of streams so as to produce reasonable throughput. Must be * non-negative. The number of streams may be lower than the requested number, - * depending on the amount parallelism that is reasonable for the table. Error - * will be returned if the max count is greater than the current system - * max limit of 1,000. + * depending on the amount parallelism that is reasonable for the table. + * There is a default system max limit of 1,000. * - * Streams must be read starting from offset 0. + * This must be greater than or equal to preferred_min_stream_count. + * Typically, clients should either leave this unset to let the system to + * determine an upper bound OR set this a size for the maximum "units of work" + * it can gracefully handle. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. 
diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 2ab96c5d6e9..1e0569e1f7e 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -73,7 +73,7 @@ export class BigQueryWriteClient { * * @param {object} [options] - The configuration object. * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] @@ -96,11 +96,10 @@ export class BigQueryWriteClient { * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. */ constructor(opts?: ClientOptions) { // Ensure that options include all the required fields. 
diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json index 67eb3165cb6..4b7f4b0657b 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json @@ -7,6 +7,11 @@ "DEADLINE_EXCEEDED", "UNAVAILABLE" ], + "deadline_exceeded_resource_exhausted_unavailable": [ + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE" + ], "unavailable": [ "UNAVAILABLE" ] @@ -20,13 +25,22 @@ "rpc_timeout_multiplier": 1, "max_rpc_timeout_millis": 60000, "total_timeout_millis": 600000 + }, + "ec82364a95d03873ac5f61710bb6b9b42e40f31d": { + "initial_retry_delay_millis": 10000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 120000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 } }, "methods": { "CreateWriteStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" + "timeout_millis": 1200000, + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", + "retry_params_name": "ec82364a95d03873ac5f61710bb6b9b42e40f31d" }, "AppendRows": { "timeout_millis": 86400000, diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 544d6d476ce..e1c27a9e683 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -70,7 +70,7 @@ export class BigQueryStorageClient { * * @param {object} [options] - The configuration object. * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). 
+ * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). * The common options are: * @param {object} [options.credentials] - Credentials object. * @param {string} [options.credentials.client_email] @@ -93,11 +93,10 @@ export class BigQueryStorageClient { * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. */ constructor(opts?: ClientOptions) { // Ensure that options include all the required fields. 
From 75dec1cccf9ff35070b95635ac648b1699f89ea9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Jun 2022 19:40:14 +0000 Subject: [PATCH 183/333] chore(main): release 3.0.0 (#273) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* --- ## [3.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v2.8.0...v3.0.0) (2022-06-29) ### ⚠ BREAKING CHANGES * update library to use Node 12 (#272) ### Features * Deprecate format specific `row_count` field in Read API ([#249](https://github.com/googleapis/nodejs-bigquery-storage/issues/249)) ([fb8acf1](https://github.com/googleapis/nodejs-bigquery-storage/commit/fb8acf1f4eab7823132159bcf5927c9eda6374e2)) ### Bug Fixes * fixes for dynamic routing and streaming descriptors ([#274](https://github.com/googleapis/nodejs-bigquery-storage/issues/274)) ([4271ea0](https://github.com/googleapis/nodejs-bigquery-storage/commit/4271ea0aaa98286696eb6822d0bef82a655a5811)) * Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time ([#279](https://github.com/googleapis/nodejs-bigquery-storage/issues/279)) ([849cc23](https://github.com/googleapis/nodejs-bigquery-storage/commit/849cc237081e63a585264a62d49e9407d2f14450)) ### Build System * update library to use Node 12 ([#272](https://github.com/googleapis/nodejs-bigquery-storage/issues/272)) ([5e774e6](https://github.com/googleapis/nodejs-bigquery-storage/commit/5e774e614132f189362d56c502960d87200a11a0)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- handwritten/bigquery-storage/CHANGELOG.md | 22 +++++++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...data.google.cloud.bigquery.storage.v1.json | 2 +- ...google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 25 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 7f8d97f132f..5d9719cffc8 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [3.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v2.8.0...v3.0.0) (2022-06-29) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#272) + +### Features + +* Deprecate format specific `row_count` field in Read API ([#249](https://github.com/googleapis/nodejs-bigquery-storage/issues/249)) ([fb8acf1](https://github.com/googleapis/nodejs-bigquery-storage/commit/fb8acf1f4eab7823132159bcf5927c9eda6374e2)) + + +### Bug Fixes + +* fixes for dynamic routing and streaming descriptors ([#274](https://github.com/googleapis/nodejs-bigquery-storage/issues/274)) ([4271ea0](https://github.com/googleapis/nodejs-bigquery-storage/commit/4271ea0aaa98286696eb6822d0bef82a655a5811)) +* Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time ([#279](https://github.com/googleapis/nodejs-bigquery-storage/issues/279)) ([849cc23](https://github.com/googleapis/nodejs-bigquery-storage/commit/849cc237081e63a585264a62d49e9407d2f14450)) + + +### Build System + +* update library to use Node 12 ([#272](https://github.com/googleapis/nodejs-bigquery-storage/issues/272)) ([5e774e6](https://github.com/googleapis/nodejs-bigquery-storage/commit/5e774e614132f189362d56c502960d87200a11a0)) + ## [2.8.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.7.0...v2.8.0) (2021-12-30) diff --git a/handwritten/bigquery-storage/package.json 
b/handwritten/bigquery-storage/package.json index 635dbfce4ff..a2e9713f6d9 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "2.8.0", + "version": "3.0.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 90db40853ef..28d36811dc6 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "2.8.0", + "version": "3.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 4bed0e60cc4..58ffbbc5164 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "2.8.0", + "version": "3.0.0", "language": "TYPESCRIPT", "apis": [ { From 0358d01d280568a37ce1c3b04281987441e869dd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 8 Jul 2022 23:02:15 +0200 Subject: [PATCH 184/333] chore(deps): update dependency linkinator to v4 (#282) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [linkinator](https://togithub.com/JustinBeckwith/linkinator) | [`^2.0.1` -> `^4.0.0`](https://renovatebot.com/diffs/npm/linkinator/2.16.2/4.0.0) | [![age](https://badges.renovateapi.com/packages/npm/linkinator/4.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/linkinator/4.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/linkinator/4.0.0/compatibility-slim/2.16.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/linkinator/4.0.0/confidence-slim/2.16.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
JustinBeckwith/linkinator ### [`v4.0.0`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v4.0.0) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.1.0...v4.0.0) ##### Features - create new release with notes ([#​508](https://togithub.com/JustinBeckwith/linkinator/issues/508)) ([2cab633](https://togithub.com/JustinBeckwith/linkinator/commit/2cab633c9659eb10794a4bac06f8b0acdc3e2c0c)) ##### BREAKING CHANGES - The commits in [#​507](https://togithub.com/JustinBeckwith/linkinator/issues/507) and [#​506](https://togithub.com/JustinBeckwith/linkinator/issues/506) both had breaking changes. They included dropping support for Node.js 12.x and updating the CSV export to be streaming, and to use a new way of writing the CSV file. This is an empty to commit using the `BREAKING CHANGE` format in the commit message to ensure a release is triggered. ### [`v3.1.0`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.1.0) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.6...v3.1.0) ##### Features - allow --skip to be defined multiple times ([#​399](https://togithub.com/JustinBeckwith/linkinator/issues/399)) ([5ca5a46](https://togithub.com/JustinBeckwith/linkinator/commit/5ca5a461508e688de12e5ae6b4cfb6565f832ebf)) ### [`v3.0.6`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.6) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.5...v3.0.6) ##### Bug Fixes - **deps:** upgrade node-glob to v8 ([#​397](https://togithub.com/JustinBeckwith/linkinator/issues/397)) ([d334dc6](https://togithub.com/JustinBeckwith/linkinator/commit/d334dc6734cd7c2b73d7ed3dea0550a6c3072ad5)) ### [`v3.0.5`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.5) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.4...v3.0.5) ##### Bug Fixes - **deps:** upgrade to htmlparser2 v8.0.1 ([#​396](https://togithub.com/JustinBeckwith/linkinator/issues/396)) 
([ba3b9a8](https://togithub.com/JustinBeckwith/linkinator/commit/ba3b9a8a9b19d39af6ed91790135e833b80c1eb6)) ### [`v3.0.4`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.4) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.3...v3.0.4) ##### Bug Fixes - **deps:** update dependency gaxios to v5 ([#​391](https://togithub.com/JustinBeckwith/linkinator/issues/391)) ([48af50e](https://togithub.com/JustinBeckwith/linkinator/commit/48af50e787731204aeb7eff41325c62291311e45)) ### [`v3.0.3`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.3) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.2...v3.0.3) ##### Bug Fixes - export getConfig from index ([#​371](https://togithub.com/JustinBeckwith/linkinator/issues/371)) ([0bc0355](https://togithub.com/JustinBeckwith/linkinator/commit/0bc0355c7e2ea457f247e6b52d1577b8c4ecb3a1)) ### [`v3.0.2`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.2) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.1...v3.0.2) ##### Bug Fixes - allow server root with trailing slash ([#​370](https://togithub.com/JustinBeckwith/linkinator/issues/370)) ([8adf6b0](https://togithub.com/JustinBeckwith/linkinator/commit/8adf6b025fda250e38461f1cdad40fe08c3b3b7c)) ### [`v3.0.1`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.1) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v3.0.0...v3.0.1) ##### Bug Fixes - decode path parts in local web server ([#​369](https://togithub.com/JustinBeckwith/linkinator/issues/369)) ([4696a0c](https://togithub.com/JustinBeckwith/linkinator/commit/4696a0c38c341b178ed815f47371fca955979feb)) ### [`v3.0.0`](https://togithub.com/JustinBeckwith/linkinator/releases/tag/v3.0.0) [Compare Source](https://togithub.com/JustinBeckwith/linkinator/compare/v2.16.2...v3.0.0) ##### Bug Fixes - **deps:** update dependency chalk to v5 
([#​362](https://togithub.com/JustinBeckwith/linkinator/issues/362)) ([4b17a8d](https://togithub.com/JustinBeckwith/linkinator/commit/4b17a8d87b649eaf813428f8ee6955e1d21dae4f)) - feat!: convert to es modules, drop node 10 ([#​359](https://togithub.com/JustinBeckwith/linkinator/issues/359)) ([efee299](https://togithub.com/JustinBeckwith/linkinator/commit/efee299ab8a805accef751eecf8538915a4e7783)), closes [#​359](https://togithub.com/JustinBeckwith/linkinator/issues/359) ##### BREAKING CHANGES - this module now requires node.js 12 and above, and has moved to es modules by default.
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a2e9713f6d9..367d3085b85 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -38,7 +38,7 @@ "jsdoc": "^3.6.3", "jsdoc-fresh": "^2.0.0", "jsdoc-region-tag": "^2.0.0", - "linkinator": "^2.0.1", + "linkinator": "^4.0.0", "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", From b96b31b3c141b17a897ebbc32774390564e11147 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 1 Aug 2022 10:50:43 -0700 Subject: [PATCH 185/333] docs: clarify size limitations for AppendRowsRequest (#283) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: clarify size limitations for AppendRowsRequest chore: add preferred_min_stream_count to CreateReadSessionRequest chore: add write_stream to AppendRowsResponse PiperOrigin-RevId: 463602530 Source-Link: https://github.com/googleapis/googleapis/commit/d33b3fa0897cee1cc57b5b428587052c87e9bf25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/90995f6433d0ecd290f186168ce957d6a0db9c68 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTA5OTVmNjQzM2QwZWNkMjkwZjE4NjE2OGNlOTU3ZDZhMGRiOWM2OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/storage.proto | 17 +++++++ .../bigquery-storage/protos/protos.d.ts | 12 +++++ handwritten/bigquery-storage/protos/protos.js | 47 ++++++++++++++++++- .../bigquery-storage/protos/protos.json | 8 ++++ .../v1/big_query_read.create_read_session.js | 10 ++++ ...data.google.cloud.bigquery.storage.v1.json | 6 ++- .../src/v1/big_query_read_client.ts | 9 ++++ 7 files changed, 107 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index f3c974c6461..e0b25c1afef 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -256,6 +256,16 @@ message CreateReadSessionRequest { // determine an upper bound OR set this a size for the maximum "units of work" // it can gracefully handle. int32 max_stream_count = 3; + + // The minimum preferred stream count. This parameter can be used to inform + // the service that there is a desired lower bound on the number of streams. + // This is typically a target parallelism of the client (e.g. a Spark + // cluster with N-workers would set this to a low multiple of N to ensure + // good cluster utilization). + // + // The system will make a best effort to provide at least this number of + // streams, but in some cases might provide less. + int32 preferred_min_stream_count = 4; } // Request message for `ReadRows`. 
@@ -395,6 +405,9 @@ message CreateWriteStreamRequest { // Due to the nature of AppendRows being a bidirectional streaming RPC, certain // parts of the AppendRowsRequest need only be specified for the first request // sent each time the gRPC network connection is opened/reopened. +// +// The size of a single AppendRowsRequest must be less than 10 MB in size. +// Requests larger than this return an error, typically `INVALID_ARGUMENT`. message AppendRowsRequest { // ProtoData contains the data rows and schema when constructing append // requests. @@ -495,6 +508,10 @@ message AppendRowsResponse { // appended. The API will return row level error info, so that the caller can // remove the bad rows and retry the request. repeated RowError row_errors = 4; + + // The target of the append operation. Matches the write_stream in the + // corresponding request. + string write_stream = 5; } // Request message for `GetWriteStreamRequest`. diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index b111ca1f564..197d0abe8c4 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -929,6 +929,9 @@ export namespace google { /** CreateReadSessionRequest maxStreamCount */ maxStreamCount?: (number|null); + + /** CreateReadSessionRequest preferredMinStreamCount */ + preferredMinStreamCount?: (number|null); } /** Represents a CreateReadSessionRequest. */ @@ -949,6 +952,9 @@ export namespace google { /** CreateReadSessionRequest maxStreamCount. */ public maxStreamCount: number; + /** CreateReadSessionRequest preferredMinStreamCount. */ + public preferredMinStreamCount: number; + /** * Creates a new CreateReadSessionRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -2039,6 +2045,9 @@ export namespace google { /** AppendRowsResponse rowErrors */ rowErrors?: (google.cloud.bigquery.storage.v1.IRowError[]|null); + + /** AppendRowsResponse writeStream */ + writeStream?: (string|null); } /** Represents an AppendRowsResponse. */ @@ -2062,6 +2071,9 @@ export namespace google { /** AppendRowsResponse rowErrors. */ public rowErrors: google.cloud.bigquery.storage.v1.IRowError[]; + /** AppendRowsResponse writeStream. */ + public writeStream: string; + /** AppendRowsResponse response. */ public response?: ("appendResult"|"error"); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index ea81e6448f4..32551b9ff33 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1919,6 +1919,7 @@ * @property {string|null} [parent] CreateReadSessionRequest parent * @property {google.cloud.bigquery.storage.v1.IReadSession|null} [readSession] CreateReadSessionRequest readSession * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount + * @property {number|null} [preferredMinStreamCount] CreateReadSessionRequest preferredMinStreamCount */ /** @@ -1960,6 +1961,14 @@ */ CreateReadSessionRequest.prototype.maxStreamCount = 0; + /** + * CreateReadSessionRequest preferredMinStreamCount. + * @member {number} preferredMinStreamCount + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.preferredMinStreamCount = 0; + /** * Creates a new CreateReadSessionRequest instance using the specified properties. 
* @function create @@ -1990,6 +1999,8 @@ $root.google.cloud.bigquery.storage.v1.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.maxStreamCount != null && Object.hasOwnProperty.call(message, "maxStreamCount")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount); + if (message.preferredMinStreamCount != null && Object.hasOwnProperty.call(message, "preferredMinStreamCount")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.preferredMinStreamCount); return writer; }; @@ -2033,6 +2044,9 @@ case 3: message.maxStreamCount = reader.int32(); break; + case 4: + message.preferredMinStreamCount = reader.int32(); + break; default: reader.skipType(tag & 7); break; @@ -2079,6 +2093,9 @@ if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) if (!$util.isInteger(message.maxStreamCount)) return "maxStreamCount: integer expected"; + if (message.preferredMinStreamCount != null && message.hasOwnProperty("preferredMinStreamCount")) + if (!$util.isInteger(message.preferredMinStreamCount)) + return "preferredMinStreamCount: integer expected"; return null; }; @@ -2103,6 +2120,8 @@ } if (object.maxStreamCount != null) message.maxStreamCount = object.maxStreamCount | 0; + if (object.preferredMinStreamCount != null) + message.preferredMinStreamCount = object.preferredMinStreamCount | 0; return message; }; @@ -2123,6 +2142,7 @@ object.parent = ""; object.readSession = null; object.maxStreamCount = 0; + object.preferredMinStreamCount = 0; } if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; @@ -2130,6 +2150,8 @@ object.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.toObject(message.readSession, options); if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) object.maxStreamCount = message.maxStreamCount; + if (message.preferredMinStreamCount != null && 
message.hasOwnProperty("preferredMinStreamCount")) + object.preferredMinStreamCount = message.preferredMinStreamCount; return object; }; @@ -4533,6 +4555,7 @@ * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema * @property {Array.|null} [rowErrors] AppendRowsResponse rowErrors + * @property {string|null} [writeStream] AppendRowsResponse writeStream */ /** @@ -4583,6 +4606,14 @@ */ AppendRowsResponse.prototype.rowErrors = $util.emptyArray; + /** + * AppendRowsResponse writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.writeStream = ""; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -4630,6 +4661,8 @@ if (message.rowErrors != null && message.rowErrors.length) for (var i = 0; i < message.rowErrors.length; ++i) $root.google.cloud.bigquery.storage.v1.RowError.encode(message.rowErrors[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.writeStream); return writer; }; @@ -4678,6 +4711,9 @@ message.rowErrors = []; message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); break; + case 5: + message.writeStream = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -4746,6 +4782,9 @@ return "rowErrors." 
+ error; } } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; return null; }; @@ -4786,6 +4825,8 @@ message.rowErrors[i] = $root.google.cloud.bigquery.storage.v1.RowError.fromObject(object.rowErrors[i]); } } + if (object.writeStream != null) + message.writeStream = String(object.writeStream); return message; }; @@ -4804,8 +4845,10 @@ var object = {}; if (options.arrays || options.defaults) object.rowErrors = []; - if (options.defaults) + if (options.defaults) { object.updatedSchema = null; + object.writeStream = ""; + } if (message.appendResult != null && message.hasOwnProperty("appendResult")) { object.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.toObject(message.appendResult, options); if (options.oneofs) @@ -4823,6 +4866,8 @@ for (var j = 0; j < message.rowErrors.length; ++j) object.rowErrors[j] = $root.google.cloud.bigquery.storage.v1.RowError.toObject(message.rowErrors[j], options); } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index d796f2c8745..25abbfdca79 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -309,6 +309,10 @@ "maxStreamCount": { "type": "int32", "id": 3 + }, + "preferredMinStreamCount": { + "type": "int32", + "id": 4 } } }, @@ -528,6 +532,10 @@ "rule": "repeated", "type": "RowError", "id": 4 + }, + "writeStream": { + "type": "string", + "id": 5 } }, "nested": { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index c5cc160a217..1f239c8f2e1 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -46,6 +46,16 @@ function main(parent, readSession) { * it can gracefully handle. */ // const maxStreamCount = 1234 + /** + * The minimum preferred stream count. This parameter can be used to inform + * the service that there is a desired lower bound on the number of streams. + * This is typically a target parallelism of the client (e.g. a Spark + * cluster with N-workers would set this to a low multiple of N to ensure + * good cluster utilization). + * The system will make a best effort to provide at least this number of + * streams, but in some cases might provide less. + */ + // const preferredMinStreamCount = 1234 // Imports the Storage library const {BigQueryReadClient} = require('@google-cloud/bigquery-storage').v1; diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 28d36811dc6..ef8132bd613 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 68, + "end": 78, "type": "FULL" } ], @@ -42,6 +42,10 @@ { "name": "max_stream_count", "type": "TYPE_INT32" + }, + { + "name": "preferred_min_stream_count", + "type": "TYPE_INT32" } ], "resultType": ".google.cloud.bigquery.storage.v1.ReadSession", diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 553fe889849..671bb00f297 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ 
b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -376,6 +376,15 @@ export class BigQueryReadClient { * Typically, clients should either leave this unset to let the system to * determine an upper bound OR set this a size for the maximum "units of work" * it can gracefully handle. + * @param {number} request.preferredMinStreamCount + * The minimum preferred stream count. This parameter can be used to inform + * the service that there is a desired lower bound on the number of streams. + * This is typically a target parallelism of the client (e.g. a Spark + * cluster with N-workers would set this to a low multiple of N to ensure + * good cluster utilization). + * + * The system will make a best effort to provide at least this number of + * streams, but in some cases might provide less. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. From fe69ee4a5ce5ea2e4fc340329f8d31fb1c7deb06 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 18 Aug 2022 08:55:09 -0700 Subject: [PATCH 186/333] feat: allow users to set Apache Avro output format options through avro_serialization_options param in TableReadOptions message (#284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: allow users to set Apache Avro output format options through avro_serialization_options param in TableReadOptions message Through AvroSerializationOptions, users can set enable_display_name_attribute, which populates displayName for every avro field with the original column name Improved documentation for selected_fields, added example for clarity. 
PiperOrigin-RevId: 468290142 Source-Link: https://github.com/googleapis/googleapis/commit/62ae1af49c90b68071b098f64189c3ecea125033 Source-Link: https://github.com/googleapis/googleapis-gen/commit/732b7f922477d25328b92359f0667fe95de52fa2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzMyYjdmOTIyNDc3ZDI1MzI4YjkyMzU5ZjA2NjdmZTk1ZGU1MmZhMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/avro.proto | 15 ++ .../cloud/bigquery/storage/v1/stream.proto | 54 ++++- .../bigquery-storage/protos/protos.d.ts | 98 +++++++- handwritten/bigquery-storage/protos/protos.js | 225 +++++++++++++++++- .../bigquery-storage/protos/protos.json | 18 +- 5 files changed, 402 insertions(+), 8 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index 15de2db5468..e1ecb667b61 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -39,3 +39,18 @@ message AvroRows { // Please use the format-independent ReadRowsResponse.row_count instead. int64 row_count = 2 [deprecated = true]; } + +// Contains options specific to Avro Serialization. +message AvroSerializationOptions { + // Enable displayName attribute in Avro schema. + // + // The Avro specification requires field names to be alphanumeric. By + // default, in cases when column names do not conform to these requirements + // (e.g. non-ascii unicode codepoints) and Avro is requested as an output + // format, the CreateReadSession call will fail. + // + // Setting this field to true, populates avro field names with a placeholder + // value and populates a "displayName" attribute for every avro field with the + // original column name. 
+ bool enable_display_name_attribute = 1; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index fd1e25b65fd..3735c73d670 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -59,10 +59,53 @@ message ReadSession { // Options dictating how we read a table. message TableReadOptions { - // Names of the fields in the table that should be read. If empty, all - // fields will be read. If the specified field is a nested field, all - // the sub-fields in the field will be selected. The output field order is - // unrelated to the order of fields in selected_fields. + // Optional. The names of the fields in the table to be returned. If no + // field names are specified, then all fields in the table are returned. + // + // Nested fields -- the child elements of a STRUCT field -- can be selected + // individually using their fully-qualified names, and will be returned as + // record fields containing only the selected nested fields. If a STRUCT + // field is specified in the selected fields list, all of the child elements + // will be returned. + // + // As an example, consider a table with the following schema: + // + // { + // "name": "struct_field", + // "type": "RECORD", + // "mode": "NULLABLE", + // "fields": [ + // { + // "name": "string_field1", + // "type": "STRING", + // . 
"mode": "NULLABLE" + // }, + // { + // "name": "string_field2", + // "type": "STRING", + // "mode": "NULLABLE" + // } + // ] + // } + // + // Specifying "struct_field" in the selected fields list will result in a + // read session schema with the following logical structure: + // + // struct_field { + // string_field1 + // string_field2 + // } + // + // Specifying "struct_field.string_field1" in the selected fields list will + // result in a read session schema with the following logical structure: + // + // struct_field { + // string_field1 + // } + // + // The order of the fields in the read session schema is derived from the + // table schema and does not correspond to the order in which the fields are + // specified in this list. repeated string selected_fields = 1; // SQL text filtering statement, similar to a WHERE clause in a query. @@ -80,6 +123,9 @@ message ReadSession { oneof output_format_serialization_options { // Optional. Options specific to the Apache Arrow output format. ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Options specific to the Apache Avro output format + AvroSerializationOptions avro_serialization_options = 4 [(google.api.field_behavior) = OPTIONAL]; } } diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 197d0abe8c4..bb841e4c0d3 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -501,6 +501,96 @@ export namespace google { public toJSON(): { [k: string]: any }; } + /** Properties of an AvroSerializationOptions. */ + interface IAvroSerializationOptions { + + /** AvroSerializationOptions enableDisplayNameAttribute */ + enableDisplayNameAttribute?: (boolean|null); + } + + /** Represents an AvroSerializationOptions. 
*/ + class AvroSerializationOptions implements IAvroSerializationOptions { + + /** + * Constructs a new AvroSerializationOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions); + + /** AvroSerializationOptions enableDisplayNameAttribute. */ + public enableDisplayNameAttribute: boolean; + + /** + * Creates a new AvroSerializationOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSerializationOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. + * @param message AvroSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. + * @param message AvroSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Verifies an AvroSerializationOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSerializationOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. + * @param message AvroSerializationOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSerializationOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + /** Properties of a ProtoSchema. */ interface IProtoSchema { @@ -3382,6 +3472,9 @@ export namespace google { /** TableReadOptions arrowSerializationOptions */ arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + + /** TableReadOptions avroSerializationOptions */ + avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); } /** Represents a TableReadOptions. */ @@ -3402,8 +3495,11 @@ export namespace google { /** TableReadOptions arrowSerializationOptions. */ public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + /** TableReadOptions avroSerializationOptions. */ + public avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); + /** TableReadOptions outputFormatSerializationOptions. */ - public outputFormatSerializationOptions?: "arrowSerializationOptions"; + public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); /** * Creates a new TableReadOptions instance using the specified properties. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 32551b9ff33..5996556813f 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1145,6 +1145,193 @@ return AvroRows; })(); + v1.AvroSerializationOptions = (function() { + + /** + * Properties of an AvroSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroSerializationOptions + * @property {boolean|null} [enableDisplayNameAttribute] AvroSerializationOptions enableDisplayNameAttribute + */ + + /** + * Constructs a new AvroSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroSerializationOptions. 
+ * @implements IAvroSerializationOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set + */ + function AvroSerializationOptions(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSerializationOptions enableDisplayNameAttribute. + * @member {boolean} enableDisplayNameAttribute + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @instance + */ + AvroSerializationOptions.prototype.enableDisplayNameAttribute = false; + + /** + * Creates a new AvroSerializationOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions instance + */ + AvroSerializationOptions.create = function create(properties) { + return new AvroSerializationOptions(properties); + }; + + /** + * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSerializationOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.enableDisplayNameAttribute != null && Object.hasOwnProperty.call(message, "enableDisplayNameAttribute")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.enableDisplayNameAttribute); + return writer; + }; + + /** + * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSerializationOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.enableDisplayNameAttribute = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSerializationOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSerializationOptions message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSerializationOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) + if (typeof message.enableDisplayNameAttribute !== "boolean") + return "enableDisplayNameAttribute: boolean expected"; + return null; + }; + + /** + * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + */ + AvroSerializationOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); + if (object.enableDisplayNameAttribute != null) + message.enableDisplayNameAttribute = Boolean(object.enableDisplayNameAttribute); + return message; + }; + + /** + * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.AvroSerializationOptions} message AvroSerializationOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSerializationOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.enableDisplayNameAttribute = false; + if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) + object.enableDisplayNameAttribute = message.enableDisplayNameAttribute; + return object; + }; + + /** + * Converts this AvroSerializationOptions to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @instance + * @returns {Object.} JSON object + */ + AvroSerializationOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return AvroSerializationOptions; + })(); + v1.ProtoSchema = (function() { /** @@ -7846,6 +8033,7 @@ * @property {Array.|null} [selectedFields] TableReadOptions selectedFields * @property {string|null} [rowRestriction] TableReadOptions rowRestriction * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions + * @property {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null} [avroSerializationOptions] TableReadOptions avroSerializationOptions */ /** @@ -7888,17 +8076,25 @@ */ TableReadOptions.prototype.arrowSerializationOptions = null; + /** + * TableReadOptions avroSerializationOptions. 
+ * @member {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null|undefined} avroSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.avroSerializationOptions = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; /** * TableReadOptions outputFormatSerializationOptions. - * @member {"arrowSerializationOptions"|undefined} outputFormatSerializationOptions + * @member {"arrowSerializationOptions"|"avroSerializationOptions"|undefined} outputFormatSerializationOptions * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions * @instance */ Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { - get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions"]), + get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions", "avroSerializationOptions"]), set: $util.oneOfSetter($oneOfFields) }); @@ -7933,6 +8129,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.avroSerializationOptions != null && Object.hasOwnProperty.call(message, "avroSerializationOptions")) + $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.encode(message.avroSerializationOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -7978,6 +8176,9 @@ case 3: message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); break; + case 4: + message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); + break; default: 
reader.skipType(tag & 7); break; @@ -8032,6 +8233,16 @@ return "arrowSerializationOptions." + error; } } + if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { + if (properties.outputFormatSerializationOptions === 1) + return "outputFormatSerializationOptions: multiple values"; + properties.outputFormatSerializationOptions = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify(message.avroSerializationOptions); + if (error) + return "avroSerializationOptions." + error; + } + } return null; }; @@ -8061,6 +8272,11 @@ throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); } + if (object.avroSerializationOptions != null) { + if (typeof object.avroSerializationOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.avroSerializationOptions: object expected"); + message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.fromObject(object.avroSerializationOptions); + } return message; }; @@ -8093,6 +8309,11 @@ if (options.oneofs) object.outputFormatSerializationOptions = "arrowSerializationOptions"; } + if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { + object.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.toObject(message.avroSerializationOptions, options); + if (options.oneofs) + object.outputFormatSerializationOptions = "avroSerializationOptions"; + } return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 25abbfdca79..389ea6f3f92 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ 
b/handwritten/bigquery-storage/protos/protos.json @@ -83,6 +83,14 @@ } } }, + "AvroSerializationOptions": { + "fields": { + "enableDisplayNameAttribute": { + "type": "bool", + "id": 1 + } + } + }, "ProtoSchema": { "fields": { "protoDescriptor": { @@ -808,7 +816,8 @@ "oneofs": { "outputFormatSerializationOptions": { "oneof": [ - "arrowSerializationOptions" + "arrowSerializationOptions", + "avroSerializationOptions" ] } }, @@ -828,6 +837,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "avroSerializationOptions": { + "type": "AvroSerializationOptions", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } } From 715d8768751118eeba2ac3aa1c4ad6bf7ba1abfd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 19 Aug 2022 20:24:14 +0000 Subject: [PATCH 187/333] chore: remove unused proto imports (#286) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 468735472 Source-Link: https://github.com/googleapis/googleapis/commit/cfa1b3782da7ccae31673d45401a0b79d2d4a84b Source-Link: https://github.com/googleapis/googleapis-gen/commit/09b7666656510f5b00b893f003a0ba5766f9e250 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDliNzY2NjY1NjUxMGY1YjAwYjg5M2YwMDNhMGJhNTc2NmY5ZTI1MCJ9 --- .../google/cloud/bigquery/storage/v1beta1/table_reference.proto | 1 - 1 file changed, 1 deletion(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index 4269392f676..22c940c0e6b 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -16,7 +16,6 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; -import "google/api/resource.proto"; import 
"google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; From 9f51ef4cd799729b784a4097e4ee49ebdfc03a95 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 23 Aug 2022 00:04:16 +0000 Subject: [PATCH 188/333] fix: better support for fallback mode (#287) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 468790263 Source-Link: https://github.com/googleapis/googleapis/commit/873ab456273d105245df0fb82a6c17a814553b80 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb6f37aeff2a3472e40a7bbace8c67d75e24bee5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2I2ZjM3YWVmZjJhMzQ3MmU0MGE3YmJhY2U4YzY3ZDc1ZTI0YmVlNSJ9 --- .../v1/big_query_read.create_read_session.js | 3 + .../generated/v1/big_query_read.read_rows.js | 3 + .../v1/big_query_read.split_read_stream.js | 3 + .../v1/big_query_write.append_rows.js | 3 + ..._query_write.batch_commit_write_streams.js | 3 + .../v1/big_query_write.create_write_stream.js | 3 + .../big_query_write.finalize_write_stream.js | 3 + .../v1/big_query_write.flush_rows.js | 3 + .../v1/big_query_write.get_write_stream.js | 3 + ...data.google.cloud.bigquery.storage.v1.json | 18 +- ...orage.batch_create_read_session_streams.js | 3 + .../big_query_storage.create_read_session.js | 3 + .../big_query_storage.finalize_stream.js | 3 + .../v1beta1/big_query_storage.read_rows.js | 3 + .../big_query_storage.split_read_stream.js | 3 + ...google.cloud.bigquery.storage.v1beta1.json | 10 +- .../src/v1/big_query_read_client.ts | 3 +- .../src/v1/big_query_write_client.ts | 3 +- .../src/v1beta1/big_query_storage_client.ts | 3 +- .../test/gapic_big_query_read_v1.ts | 156 ++++++++--------- .../test/gapic_big_query_storage_v1beta1.ts | 160 +++++++++--------- .../test/gapic_big_query_write_v1.ts | 158 ++++++++--------- 22 files changed, 303 insertions(+), 250 deletions(-) diff --git 
a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index 1f239c8f2e1..6cbaf38dcdf 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -23,6 +23,9 @@ function main(parent, readSession) { // [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index f5f781a46b8..30bea092495 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -23,6 +23,9 @@ function main(readStream) { // [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index 6e146955b95..a7d2a761348 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 9cefbb22166..24764fa1010 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -23,6 +23,9 @@ function main(writeStream) { // [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 9c258a30082..90cdefc4f94 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -23,6 +23,9 @@ function main(parent, writeStreams) { // [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index 839a8bf6628..e67550626ab 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -23,6 +23,9 @@ function main(parent, writeStream) { // [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index 3ff3da7adc8..85faa908822 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index 751fdebf6e0..ae6e56be382 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -23,6 +23,9 @@ function main(writeStream) { // [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 1650750820f..2d83ddbe23b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index ef8132bd613..1036b356fbb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 78, + "end": 81, "type": "FULL" } ], @@ -74,7 +74,7 @@ "segments": [ { "start": 25, - "end": 58, + "end": 61, "type": "FULL" } ], @@ -118,7 +118,7 @@ "segments": [ { "start": 25, - "end": 60, + "end": 63, "type": "FULL" } ], @@ -162,7 +162,7 @@ "segments": [ { "start": 25, - "end": 56, + "end": 59, "type": "FULL" } ], @@ -206,7 +206,7 @@ "segments": [ { "start": 25, - "end": 77, + "end": 80, "type": "FULL" } ], @@ -258,7 +258,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -298,7 +298,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -338,7 +338,7 @@ 
"segments": [ { "start": 25, - "end": 56, + "end": 59, "type": "FULL" } ], @@ -382,7 +382,7 @@ "segments": [ { "start": 25, - "end": 55, + "end": 58, "type": "FULL" } ], diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index 3d7b6d3e429..dd1d96f46b4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -23,6 +23,9 @@ function main(session, requestedStreams) { // [START bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index c71a464ce5d..3bf8d5ab0df 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -23,6 +23,9 @@ function main(tableReference, parent) { // [START bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. 
* TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index 51d63d0bb69..274b957673b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -23,6 +23,9 @@ function main(stream) { // [START bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index 65924b1746b..27981cfa6a3 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -23,6 +23,9 @@ function main(readPosition) { // [START bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index 24ae597d43a..79c048a8427 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -23,6 +23,9 @@ function main(originalStream) { // [START bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 58ffbbc5164..ac58a9c66c9 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 83, + "end": 86, "type": "FULL" } ], @@ -86,7 +86,7 @@ "segments": [ { "start": 25, - "end": 54, + "end": 57, "type": "FULL" } ], @@ -126,7 +126,7 @@ "segments": [ { "start": 25, - "end": 58, + "end": 61, "type": "FULL" } ], @@ -170,7 +170,7 @@ "segments": [ { "start": 25, - "end": 50, + "end": 53, "type": "FULL" } ], @@ -210,7 +210,7 @@ "segments": [ { "start": 25, - "end": 60, + "end": 63, "type": "FULL" } ], diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts 
b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 671bb00f297..81c81a81d68 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -272,7 +272,8 @@ export class BigQueryReadClient { const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - descriptor + descriptor, + this._opts.fallback ); this.innerApiCalls[methodName] = apiCall; diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 1e0569e1f7e..02021a6bd26 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -278,7 +278,8 @@ export class BigQueryWriteClient { const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - descriptor + descriptor, + this._opts.fallback ); this.innerApiCalls[methodName] = apiCall; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index e1c27a9e683..a6a6d6c9596 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -269,7 +269,8 @@ export class BigQueryStorageClient { const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - descriptor + descriptor, + this._opts.fallback ); this.innerApiCalls[methodName] = apiCall; diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 793ecd88c3e..18113a85423 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -73,99 +73,101 @@ function stubServerStreamingCall( } 
describe('v1.BigQueryReadClient', () => { - it('has servicePath', () => { - const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigqueryreadModule.v1.BigQueryReadClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient(); - assert(client); - }); + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; + assert(servicePath); + }); - it('should create a client with gRPC fallback', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - fallback: true, + it('has apiEndpoint', () => { + const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; + assert(apiEndpoint); }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has port', () => { + const port = bigqueryreadModule.v1.BigQueryReadClient.port; + assert(port); + assert(typeof port === 'number'); }); - assert.strictEqual(client.bigQueryReadStub, undefined); - await client.initialize(); - assert(client.bigQueryReadStub); - }); - it('has close method for the initialized client', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with no option', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + assert(client); }); - client.initialize(); - assert(client.bigQueryReadStub); - 
client.close().then(() => { - done(); + + it('should create a client with gRPC fallback', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + fallback: true, + }); + assert(client); }); - }); - it('has close method for the non-initialized client', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has initialize method and supports deferred initialization', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + await client.initialize(); + assert(client.bigQueryReadStub); }); - assert.strictEqual(client.bigQueryReadStub, undefined); - client.close().then(() => { - done(); + + it('has close method for the initialized client', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryReadStub); + client.close().then(() => { + done(); + }); }); - }); - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has close method for the non-initialized client', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + client.close().then(() => { + done(); + }); }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as 
SinonStub).calledWithExactly()); - }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); }); describe('createReadSession', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 005e54894e1..7e38e445bce 100644 --- 
a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -73,101 +73,103 @@ function stubServerStreamingCall( } describe('v1beta1.BigQueryStorageClient', () => { - it('has servicePath', () => { - const servicePath = - bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; - assert(port); - assert(typeof port === 'number'); - }); + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; + assert(servicePath); + }); - it('should create a client with no option', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); - assert(client); - }); + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; + assert(apiEndpoint); + }); - it('should create a client with gRPC fallback', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - fallback: true, + it('has port', () => { + const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; + assert(port); + assert(typeof port === 'number'); }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with no option', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + assert(client); }); - assert.strictEqual(client.bigQueryStorageStub, undefined); - await 
client.initialize(); - assert(client.bigQueryStorageStub); - }); - it('has close method for the initialized client', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with gRPC fallback', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + fallback: true, + }); + assert(client); }); - client.initialize(); - assert(client.bigQueryStorageStub); - client.close().then(() => { - done(); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + await client.initialize(); + assert(client.bigQueryStorageStub); }); - }); - it('has close method for the non-initialized client', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has close method for the initialized client', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryStorageStub); + client.close().then(() => { + done(); + }); }); - assert.strictEqual(client.bigQueryStorageStub, undefined); - client.close().then(() => { - done(); + + it('has close method for the non-initialized client', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + client.close().then(() => { + done(); + }); }); - }); - it('has getProjectId method', 
async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } 
else { + resolve(projectId); + } + }); }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); }); describe('createReadSession', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index 44fdb98de72..c27c4dfd91f 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -66,99 +66,103 @@ function stubBidiStreamingCall( } describe('v1.BigQueryWriteClient', () => { - it('has servicePath', () => { - const servicePath = bigquerywriteModule.v1.BigQueryWriteClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigquerywriteModule.v1.BigQueryWriteClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient(); - assert(client); - }); + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + bigquerywriteModule.v1.BigQueryWriteClient.servicePath; + assert(servicePath); + }); - it('should create a client with gRPC fallback', () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - fallback: true, + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; + assert(apiEndpoint); }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has port', () => { + const port = 
bigquerywriteModule.v1.BigQueryWriteClient.port; + assert(port); + assert(typeof port === 'number'); }); - assert.strictEqual(client.bigQueryWriteStub, undefined); - await client.initialize(); - assert(client.bigQueryWriteStub); - }); - it('has close method for the initialized client', done => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with no option', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + assert(client); }); - client.initialize(); - assert(client.bigQueryWriteStub); - client.close().then(() => { - done(); + + it('should create a client with gRPC fallback', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + fallback: true, + }); + assert(client); }); - }); - it('has close method for the non-initialized client', done => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryWriteStub, undefined); + await client.initialize(); + assert(client.bigQueryWriteStub); }); - assert.strictEqual(client.bigQueryWriteStub, undefined); - client.close().then(() => { - done(); + + it('has close method for the initialized client', done => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryWriteStub); + client.close().then(() => { + done(); + }); }); - }); - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new 
bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has close method for the non-initialized client', done => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryWriteStub, undefined); + client.close().then(() => { + done(); + }); }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = 
sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); }); describe('createWriteStream', () => { From 152cad3ad24c8a8baac089cc7a23ac7a11c22eaa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 23 Aug 2022 07:38:17 +0000 Subject: [PATCH 189/333] fix: change import long to require (#289) Source-Link: https://github.com/googleapis/synthtool/commit/d229a1258999f599a90a9b674a1c5541e00db588 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:74ab2b3c71ef27e6d8b69b1d0a0c9d31447777b79ac3cd4be82c265b45f37e5e --- .../.github/.OwlBot.lock.yaml | 4 +- .../bigquery-storage/protos/protos.d.ts | 779 ++- handwritten/bigquery-storage/protos/protos.js | 4194 ++++++++++++----- .../bigquery-storage/protos/protos.json | 24 + 4 files changed, 3869 insertions(+), 1132 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index f3ca5561cb5..f7c796c60cd 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:ddb19a6df6c1fa081bc99fb29658f306dd64668bc26f75d1353b28296f3a78e6 -# created: 2022-06-07T21:18:30.024751809Z + digest: sha256:74ab2b3c71ef27e6d8b69b1d0a0c9d31447777b79ac3cd4be82c265b45f37e5e +# created: 2022-08-22T22:07:00.791732705Z diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index bb841e4c0d3..2cfa3409b32 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -import * as Long from "long"; +import Long = require("long"); import {protobuf as $protobuf} from "google-gax"; /** Namespace google. */ export namespace google { @@ -117,6 +117,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an ArrowRecordBatch. */ @@ -213,6 +220,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowRecordBatch + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an ArrowSerializationOptions. 
*/ @@ -303,6 +317,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSerializationOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace ArrowSerializationOptions { @@ -403,6 +424,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an AvroRows. */ @@ -499,6 +527,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an AvroSerializationOptions. */ @@ -589,6 +624,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSerializationOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ProtoSchema. */ @@ -679,6 +721,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ProtoRows. 
*/ @@ -769,6 +818,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Represents a BigQueryRead */ @@ -837,21 +893,21 @@ export namespace google { namespace BigQueryRead { /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#createReadSession}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. * @param error Error, if any * @param [response] ReadSession */ type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadSession) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#readRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. * @param error Error, if any * @param [response] ReadRowsResponse */ type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadRowsResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#splitReadStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. * @param error Error, if any * @param [response] SplitReadStreamResponse */ @@ -966,42 +1022,42 @@ export namespace google { namespace BigQueryWrite { /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#createWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. 
* @param error Error, if any * @param [response] WriteStream */ type CreateWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#appendRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. * @param error Error, if any * @param [response] AppendRowsResponse */ type AppendRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.AppendRowsResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#getWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. * @param error Error, if any * @param [response] WriteStream */ type GetWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#finalizeWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. * @param error Error, if any * @param [response] FinalizeWriteStreamResponse */ type FinalizeWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#batchCommitWriteStreams}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. * @param error Error, if any * @param [response] BatchCommitWriteStreamsResponse */ type BatchCommitWriteStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#flushRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. 
* @param error Error, if any * @param [response] FlushRowsResponse */ @@ -1114,6 +1170,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ReadRowsRequest. */ @@ -1210,6 +1273,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ThrottleState. */ @@ -1300,6 +1370,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ThrottleState + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a StreamStats. 
*/ @@ -1390,6 +1467,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamStats + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace StreamStats { @@ -1488,6 +1572,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Progress + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -1621,6 +1712,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a SplitReadStreamRequest. */ @@ -1717,6 +1815,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a SplitReadStreamResponse. */ @@ -1813,6 +1918,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a CreateWriteStreamRequest. 
*/ @@ -1909,6 +2021,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an AppendRowsRequest. */ @@ -2020,6 +2139,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace AppendRowsRequest { @@ -2118,6 +2244,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoData + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -2236,6 +2369,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace AppendRowsResponse { @@ -2328,6 +2468,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendResult + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -2419,6 +2566,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: 
string]: any }; + + /** + * Gets the default type url for GetWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BatchCommitWriteStreamsRequest. */ @@ -2515,6 +2669,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCommitWriteStreamsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BatchCommitWriteStreamsResponse. */ @@ -2611,6 +2772,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCommitWriteStreamsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FinalizeWriteStreamRequest. */ @@ -2701,6 +2869,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FinalizeWriteStreamResponse. */ @@ -2791,6 +2966,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeWriteStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FlushRowsRequest. 
*/ @@ -2887,6 +3069,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FlushRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FlushRowsResponse. */ @@ -2977,6 +3166,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FlushRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a StorageError. */ @@ -3079,6 +3275,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StorageError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace StorageError { @@ -3198,6 +3401,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for RowError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace RowError { @@ -3367,6 +3577,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadSession + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace ReadSession { @@ -3459,6 +3676,13 @@ export namespace google { * @returns JSON object 
*/ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableModifiers + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a TableReadOptions. */ @@ -3570,6 +3794,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReadOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -3661,6 +3892,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a WriteStream. */ @@ -3781,6 +4019,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for WriteStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace WriteStream { @@ -3888,6 +4133,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a TableFieldSchema. 
*/ @@ -4020,6 +4272,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableFieldSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace TableFieldSchema { @@ -4145,6 +4404,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an ArrowRecordBatch. */ @@ -4241,6 +4507,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowRecordBatch + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an AvroSchema. */ @@ -4331,6 +4604,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an AvroRows. */ @@ -4427,6 +4707,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a TableReadOptions. 
*/ @@ -4523,6 +4810,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReadOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Represents a BigQueryStorage */ @@ -4619,35 +4913,35 @@ export namespace google { namespace BigQueryStorage { /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. * @param error Error, if any * @param [response] ReadSession */ type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. * @param error Error, if any * @param [response] ReadRowsResponse */ type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. * @param error Error, if any * @param [response] BatchCreateReadSessionStreamsResponse */ type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. 
* @param error Error, if any * @param [response] Empty */ type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. * @param error Error, if any * @param [response] SplitReadStreamResponse */ @@ -4742,6 +5036,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Stream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a StreamPosition. */ @@ -4838,6 +5139,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamPosition + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ReadSession. */ @@ -4973,6 +5281,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadSession + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a CreateReadSessionRequest. 
*/ @@ -5099,6 +5414,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** DataFormat enum. */ @@ -5203,6 +5525,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a StreamStatus. */ @@ -5311,6 +5640,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamStatus + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a Progress. */ @@ -5407,6 +5743,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Progress + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ThrottleStatus. */ @@ -5497,6 +5840,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ThrottleStatus + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ReadRowsResponse. 
*/ @@ -5614,6 +5964,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BatchCreateReadSessionStreamsRequest. */ @@ -5710,6 +6067,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BatchCreateReadSessionStreamsResponse. */ @@ -5800,6 +6164,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FinalizeStreamRequest. */ @@ -5890,6 +6261,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a SplitReadStreamRequest. 
*/ @@ -5986,6 +6364,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a SplitReadStreamResponse. */ @@ -6082,6 +6467,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a TableReference. */ @@ -6184,6 +6576,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReference + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a TableModifiers. */ @@ -6274,6 +6673,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableModifiers + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } } @@ -6371,6 +6777,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileDescriptorSet + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FileDescriptorProto. 
*/ @@ -6411,6 +6824,9 @@ export namespace google { /** FileDescriptorProto syntax */ syntax?: (string|null); + + /** FileDescriptorProto edition */ + edition?: (string|null); } /** Represents a FileDescriptorProto. */ @@ -6458,6 +6874,9 @@ export namespace google { /** FileDescriptorProto syntax. */ public syntax: string; + /** FileDescriptorProto edition. */ + public edition: string; + /** * Creates a new FileDescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -6527,6 +6946,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a DescriptorProto. */ @@ -6671,6 +7097,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace DescriptorProto { @@ -6775,6 +7208,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExtensionRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ReservedRange. 
*/ @@ -6871,6 +7311,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReservedRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -6962,6 +7409,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExtensionRangeOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FieldDescriptorProto. */ @@ -7112,6 +7566,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace FieldDescriptorProto { @@ -7240,6 +7701,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for OneofDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an EnumDescriptorProto. 
*/ @@ -7354,6 +7822,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace EnumDescriptorProto { @@ -7452,6 +7927,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumReservedRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -7555,6 +8037,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumValueDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ServiceDescriptorProto. */ @@ -7657,6 +8146,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ServiceDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a MethodDescriptorProto. */ @@ -7777,6 +8273,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MethodDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FileOptions. 
*/ @@ -7990,6 +8493,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace FileOptions { @@ -8117,6 +8627,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MessageOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FieldOptions. */ @@ -8134,6 +8651,9 @@ export namespace google { /** FieldOptions lazy */ lazy?: (boolean|null); + /** FieldOptions unverifiedLazy */ + unverifiedLazy?: (boolean|null); + /** FieldOptions deprecated */ deprecated?: (boolean|null); @@ -8171,6 +8691,9 @@ export namespace google { /** FieldOptions lazy. */ public lazy: boolean; + /** FieldOptions unverifiedLazy. */ + public unverifiedLazy: boolean; + /** FieldOptions deprecated. */ public deprecated: boolean; @@ -8249,6 +8772,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace FieldOptions { @@ -8356,6 +8886,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for OneofOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an EnumOptions. 
*/ @@ -8458,6 +8995,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an EnumValueOptions. */ @@ -8554,6 +9098,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumValueOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a ServiceOptions. */ @@ -8656,6 +9207,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ServiceOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a MethodOptions. 
*/ @@ -8764,6 +9322,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MethodOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace MethodOptions { @@ -8900,6 +9465,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UninterpretedOption + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace UninterpretedOption { @@ -8998,6 +9570,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for NamePart + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -9089,6 +9668,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SourceCodeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace SourceCodeInfo { @@ -9205,6 +9791,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Location + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -9296,6 +9889,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the 
default type url for GeneratedCodeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace GeneratedCodeInfo { @@ -9314,6 +9914,9 @@ export namespace google { /** Annotation end */ end?: (number|null); + + /** Annotation semantic */ + semantic?: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null); } /** Represents an Annotation. */ @@ -9337,6 +9940,9 @@ export namespace google { /** Annotation end. */ public end: number; + /** Annotation semantic. */ + public semantic: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic); + /** * Creates a new Annotation instance using the specified properties. * @param [properties] Properties to set @@ -9406,6 +10012,23 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Annotation + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace Annotation { + + /** Semantic enum. */ + enum Semantic { + NONE = 0, + SET = 1, + ALIAS = 2 + } } } @@ -9503,6 +10126,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Timestamp + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a DoubleValue. 
*/ @@ -9593,6 +10223,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DoubleValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a FloatValue. */ @@ -9683,6 +10320,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FloatValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an Int64Value. */ @@ -9773,6 +10417,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Int64Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a UInt64Value. */ @@ -9863,6 +10514,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UInt64Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an Int32Value. */ @@ -9953,6 +10611,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Int32Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a UInt32Value. 
*/ @@ -10043,6 +10708,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UInt32Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BoolValue. */ @@ -10133,6 +10805,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BoolValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a StringValue. */ @@ -10223,6 +10902,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StringValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a BytesValue. */ @@ -10313,6 +10999,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BytesValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an Any. */ @@ -10409,6 +11102,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Any + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an Empty. 
*/ @@ -10493,6 +11193,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Empty + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -10593,6 +11300,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Http + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a HttpRule. */ @@ -10740,6 +11454,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for HttpRule + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a CustomHttpPattern. */ @@ -10836,6 +11557,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CustomHttpPattern + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** FieldBehavior enum. 
*/ @@ -10974,6 +11702,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ResourceDescriptor + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } namespace ResourceDescriptor { @@ -11086,6 +11821,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ResourceReference + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } @@ -11192,6 +11934,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Status + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } } } diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 5996556813f..542a50d85f2 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -167,9 +167,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedSchema = reader.bytes(); - break; + case 1: { + message.serializedSchema = reader.bytes(); + break; + } default: reader.skipType(tag & 7); break; @@ -226,7 +227,7 @@ if (object.serializedSchema != null) if (typeof object.serializedSchema === "string") $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); - else if (object.serializedSchema.length) + else if (object.serializedSchema.length >= 0) message.serializedSchema = 
object.serializedSchema; return message; }; @@ -268,6 +269,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ArrowSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSchema"; + }; + return ArrowSchema; })(); @@ -374,12 +390,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedRecordBatch = reader.bytes(); - break; - case 2: - message.rowCount = reader.int64(); - break; + case 1: { + message.serializedRecordBatch = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -439,7 +457,7 @@ if (object.serializedRecordBatch != null) if (typeof object.serializedRecordBatch === "string") $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); - else if (object.serializedRecordBatch.length) + else if (object.serializedRecordBatch.length >= 0) message.serializedRecordBatch = object.serializedRecordBatch; if (object.rowCount != null) if ($util.Long) @@ -501,6 +519,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ArrowRecordBatch + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowRecordBatch.getTypeUrl = function 
getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowRecordBatch"; + }; + return ArrowRecordBatch; })(); @@ -596,9 +629,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.bufferCompression = reader.int32(); - break; + case 2: { + message.bufferCompression = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -706,6 +740,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ArrowSerializationOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSerializationOptions"; + }; + /** * CompressionCodec enum. 
* @name google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec @@ -817,9 +866,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.schema = reader.string(); - break; + case 1: { + message.schema = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -909,6 +959,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AvroSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSchema"; + }; + return AvroSchema; })(); @@ -1015,12 +1080,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedBinaryRows = reader.bytes(); - break; - case 2: - message.rowCount = reader.int64(); - break; + case 1: { + message.serializedBinaryRows = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -1080,7 +1147,7 @@ if (object.serializedBinaryRows != null) if (typeof object.serializedBinaryRows === "string") $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); - else if (object.serializedBinaryRows.length) + else if (object.serializedBinaryRows.length >= 0) message.serializedBinaryRows = object.serializedBinaryRows; if (object.rowCount != null) if ($util.Long) @@ -1142,6 +1209,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AvroRows + * @function 
getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroRows"; + }; + return AvroRows; })(); @@ -1237,9 +1319,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.enableDisplayNameAttribute = reader.bool(); - break; + case 1: { + message.enableDisplayNameAttribute = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -1329,6 +1412,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AvroSerializationOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSerializationOptions"; + }; + return AvroSerializationOptions; })(); @@ -1424,9 +1522,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); - break; + case 1: { + message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -1521,6 +1620,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ProtoSchema + * 
@function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoSchema"; + }; + return ProtoSchema; })(); @@ -1618,11 +1732,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.serializedRows && message.serializedRows.length)) - message.serializedRows = []; - message.serializedRows.push(reader.bytes()); - break; + case 1: { + if (!(message.serializedRows && message.serializedRows.length)) + message.serializedRows = []; + message.serializedRows.push(reader.bytes()); + break; + } default: reader.skipType(tag & 7); break; @@ -1687,7 +1802,7 @@ for (var i = 0; i < object.serializedRows.length; ++i) if (typeof object.serializedRows[i] === "string") $util.base64.decode(object.serializedRows[i], message.serializedRows[i] = $util.newBuffer($util.base64.length(object.serializedRows[i])), 0); - else if (object.serializedRows[i].length) + else if (object.serializedRows[i].length >= 0) message.serializedRows[i] = object.serializedRows[i]; } return message; @@ -1727,6 +1842,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ProtoRows + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoRows"; + }; + 
return ProtoRows; })(); @@ -1763,7 +1893,7 @@ }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#createReadSession}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. * @memberof google.cloud.bigquery.storage.v1.BigQueryRead * @typedef CreateReadSessionCallback * @type {function} @@ -1796,7 +1926,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#readRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. * @memberof google.cloud.bigquery.storage.v1.BigQueryRead * @typedef ReadRowsCallback * @type {function} @@ -1829,7 +1959,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead#splitReadStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. * @memberof google.cloud.bigquery.storage.v1.BigQueryRead * @typedef SplitReadStreamCallback * @type {function} @@ -1897,7 +2027,7 @@ }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#createWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef CreateWriteStreamCallback * @type {function} @@ -1930,7 +2060,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#appendRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef AppendRowsCallback * @type {function} @@ -1963,7 +2093,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#getWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. 
* @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef GetWriteStreamCallback * @type {function} @@ -1996,7 +2126,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#finalizeWriteStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef FinalizeWriteStreamCallback * @type {function} @@ -2029,7 +2159,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#batchCommitWriteStreams}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef BatchCommitWriteStreamsCallback * @type {function} @@ -2062,7 +2192,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite#flushRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. 
* @memberof google.cloud.bigquery.storage.v1.BigQueryWrite * @typedef FlushRowsCallback * @type {function} @@ -2222,18 +2352,22 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.parent = reader.string(); - break; - case 2: - message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.decode(reader, reader.uint32()); - break; - case 3: - message.maxStreamCount = reader.int32(); - break; - case 4: - message.preferredMinStreamCount = reader.int32(); - break; + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.decode(reader, reader.uint32()); + break; + } + case 3: { + message.maxStreamCount = reader.int32(); + break; + } + case 4: { + message.preferredMinStreamCount = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -2353,6 +2487,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for CreateReadSessionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateReadSessionRequest"; + }; + return CreateReadSessionRequest; })(); @@ -2459,12 +2608,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.readStream = reader.string(); - break; - case 2: - message.offset = reader.int64(); - break; + case 1: { + message.readStream = reader.string(); + break; + } + case 2: { + message.offset = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -2577,6 
+2728,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsRequest"; + }; + return ReadRowsRequest; })(); @@ -2672,9 +2838,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.throttlePercent = reader.int32(); - break; + case 1: { + message.throttlePercent = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -2764,6 +2931,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ThrottleState + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ThrottleState.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ThrottleState"; + }; + return ThrottleState; })(); @@ -2859,9 +3041,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); - break; + case 2: { + message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -2956,6 +3139,21 
@@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for StreamStats + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamStats.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats"; + }; + StreamStats.Progress = (function() { /** @@ -3059,12 +3257,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.atResponseStart = reader.double(); - break; - case 2: - message.atResponseEnd = reader.double(); - break; + case 1: { + message.atResponseStart = reader.double(); + break; + } + case 2: { + message.atResponseEnd = reader.double(); + break; + } default: reader.skipType(tag & 7); break; @@ -3163,6 +3363,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Progress + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats.Progress"; + }; + return Progress; })(); @@ -3352,27 +3567,34 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 3: - message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); - break; - case 4: - message.arrowRecordBatch = 
$root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); - break; - case 6: - message.rowCount = reader.int64(); - break; - case 2: - message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.decode(reader, reader.uint32()); - break; - case 5: - message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); - break; - case 7: - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); - break; - case 8: - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); - break; + case 3: { + message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); + break; + } + case 4: { + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + } + case 6: { + message.rowCount = reader.int64(); + break; + } + case 2: { + message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.decode(reader, reader.uint32()); + break; + } + case 5: { + message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); + break; + } + case 7: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 8: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -3580,6 +3802,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsResponse.getTypeUrl = function 
getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsResponse"; + }; + return ReadRowsResponse; })(); @@ -3686,12 +3923,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.fraction = reader.double(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.fraction = reader.double(); + break; + } default: reader.skipType(tag & 7); break; @@ -3790,6 +4029,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for SplitReadStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamRequest"; + }; + return SplitReadStreamRequest; })(); @@ -3896,12 +4150,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); - break; - case 2: - message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); - break; + case 1: { + message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -4010,6 +4266,21 @@ return 
this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for SplitReadStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamResponse"; + }; + return SplitReadStreamResponse; })(); @@ -4116,12 +4387,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.parent = reader.string(); - break; - case 2: - message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.decode(reader, reader.uint32()); - break; + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -4225,6 +4498,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for CreateWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateWriteStreamRequest"; + }; + return CreateWriteStreamRequest; })(); @@ -4367,21 +4655,25 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - 
message.writeStream = reader.string(); - break; - case 2: - message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; - case 4: - message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); - break; - case 6: - message.traceId = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; + case 1: { + message.writeStream = reader.string(); + break; + } + case 2: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } + case 4: { + message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); + break; + } + case 6: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; } } return message; @@ -4509,6 +4801,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AppendRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest"; + }; + AppendRowsRequest.ProtoData = (function() { /** @@ -4612,12 +4919,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); - break; - case 2: - message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.decode(reader, reader.uint32()); - break; + case 1: { + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); 
+ break; + } + case 2: { + message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -4726,6 +5035,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ProtoData + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"; + }; + return ProtoData; })(); @@ -4884,23 +5208,28 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); - break; - case 2: - message.error = $root.google.rpc.Status.decode(reader, reader.uint32()); - break; - case 3: - message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); - break; - case 4: - if (!(message.rowErrors && message.rowErrors.length)) - message.rowErrors = []; - message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); - break; - case 5: - message.writeStream = reader.string(); - break; + case 1: { + message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); + break; + } + case 2: { + message.error = $root.google.rpc.Status.decode(reader, reader.uint32()); + break; + } + case 3: { + message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); + break; + } + case 4: { + if 
(!(message.rowErrors && message.rowErrors.length)) + message.rowErrors = []; + message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); + break; + } + case 5: { + message.writeStream = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -5069,6 +5398,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AppendRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse"; + }; + AppendRowsResponse.AppendResult = (function() { /** @@ -5161,9 +5505,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; + case 1: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -5258,6 +5603,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AppendResult + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendResult.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"; + 
}; + return AppendResult; })(); @@ -5356,9 +5716,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; + case 1: { + message.name = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -5448,6 +5809,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for GetWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + GetWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.GetWriteStreamRequest"; + }; + return GetWriteStreamRequest; })(); @@ -5556,14 +5932,16 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.parent = reader.string(); - break; - case 2: - if (!(message.writeStreams && message.writeStreams.length)) - message.writeStreams = []; - message.writeStreams.push(reader.string()); - break; + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.writeStreams && message.writeStreams.length)) + message.writeStreams = []; + message.writeStreams.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -5674,6 +6052,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BatchCommitWriteStreamsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + 
BatchCommitWriteStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest"; + }; + return BatchCommitWriteStreamsRequest; })(); @@ -5782,14 +6175,16 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 2: - if (!(message.streamErrors && message.streamErrors.length)) - message.streamErrors = []; - message.streamErrors.push($root.google.cloud.bigquery.storage.v1.StorageError.decode(reader, reader.uint32())); - break; + case 1: { + message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + if (!(message.streamErrors && message.streamErrors.length)) + message.streamErrors = []; + message.streamErrors.push($root.google.cloud.bigquery.storage.v1.StorageError.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -5910,6 +6305,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BatchCommitWriteStreamsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCommitWriteStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse"; + }; + return BatchCommitWriteStreamsResponse; })(); @@ -6005,9 +6415,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = 
reader.string(); - break; + case 1: { + message.name = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -6097,6 +6508,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FinalizeWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FinalizeWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest"; + }; + return FinalizeWriteStreamRequest; })(); @@ -6192,9 +6618,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.rowCount = reader.int64(); - break; + case 1: { + message.rowCount = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -6298,6 +6725,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FinalizeWriteStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FinalizeWriteStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse"; + }; + return FinalizeWriteStreamResponse; })(); @@ -6404,12 +6846,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.writeStream = reader.string(); - break; - case 2: - 
message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; + case 1: { + message.writeStream = reader.string(); + break; + } + case 2: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -6513,6 +6957,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FlushRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FlushRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsRequest"; + }; + return FlushRowsRequest; })(); @@ -6608,9 +7067,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.offset = reader.int64(); - break; + case 1: { + message.offset = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -6714,6 +7174,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FlushRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FlushRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsResponse"; + }; + return FlushRowsResponse; })(); @@ -6831,15 +7306,18 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - 
case 1: - message.code = reader.int32(); - break; - case 2: - message.entity = reader.string(); - break; - case 3: - message.errorMessage = reader.string(); - break; + case 1: { + message.code = reader.int32(); + break; + } + case 2: { + message.entity = reader.string(); + break; + } + case 3: { + message.errorMessage = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -6999,6 +7477,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for StorageError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StorageError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StorageError"; + }; + /** * StorageErrorCode enum. 
* @name google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode @@ -7146,15 +7639,18 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.index = reader.int64(); - break; - case 2: - message.code = reader.int32(); - break; - case 3: - message.message = reader.string(); - break; + case 1: { + message.index = reader.int64(); + break; + } + case 2: { + message.code = reader.int32(); + break; + } + case 3: { + message.message = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -7288,6 +7784,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for RowError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + RowError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.RowError"; + }; + /** * RowErrorCode enum. 
* @name google.cloud.bigquery.storage.v1.RowError.RowErrorCode @@ -7539,41 +8050,52 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 3: - message.dataFormat = reader.int32(); - break; - case 4: - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); - break; - case 5: - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); - break; - case 6: - message.table = reader.string(); - break; - case 7: - message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); - break; - case 8: - message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); - break; - case 10: - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); - break; - case 12: - message.estimatedTotalBytesScanned = reader.int64(); - break; - case 13: - message.traceId = reader.string(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 3: { + message.dataFormat = reader.int32(); + break; + } + case 4: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 5: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + case 6: { + message.table = reader.string(); + break; + } + case 7: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, 
reader.uint32()); + break; + } + case 8: { + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); + break; + } + case 10: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); + break; + } + case 12: { + message.estimatedTotalBytesScanned = reader.int64(); + break; + } + case 13: { + message.traceId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -7832,6 +8354,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadSession + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession"; + }; + ReadSession.TableModifiers = (function() { /** @@ -7924,9 +8461,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; + case 1: { + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -8021,6 +8559,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableModifiers + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + 
*/ + TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"; + }; + return TableModifiers; })(); @@ -8165,20 +8718,24 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); - break; - case 2: - message.rowRestriction = reader.string(); - break; - case 3: - message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); - break; - case 4: - message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); - break; + case 1: { + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + } + case 2: { + message.rowRestriction = reader.string(); + break; + } + case 3: { + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); + break; + } + case 4: { + message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -8328,6 +8885,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableReadOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if 
(typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"; + }; + return TableReadOptions; })(); @@ -8426,9 +8998,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; + case 1: { + message.name = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -8518,6 +9091,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadStream"; + }; + return ReadStream; })(); @@ -8668,24 +9256,30 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.type = reader.int32(); - break; - case 3: - message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 4: - message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 5: - message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); - break; - case 7: - message.writeMode = reader.int32(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.int32(); + break; + } + case 3: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 4: { + message.commitTime = 
$root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 5: { + message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); + break; + } + case 7: { + message.writeMode = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -8867,6 +9461,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for WriteStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + WriteStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.WriteStream"; + }; + /** * Type enum. * @name google.cloud.bigquery.storage.v1.WriteStream.Type @@ -8996,11 +9605,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.fields && message.fields.length)) - message.fields = []; - message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); - break; + case 1: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -9107,6 +9717,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableSchema.getTypeUrl = function 
getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableSchema"; + }; + return TableSchema; })(); @@ -9281,32 +9906,40 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.type = reader.int32(); - break; - case 3: - message.mode = reader.int32(); - break; - case 4: - if (!(message.fields && message.fields.length)) - message.fields = []; - message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); - break; - case 6: - message.description = reader.string(); - break; - case 7: - message.maxLength = reader.int64(); - break; - case 8: - message.precision = reader.int64(); - break; - case 9: - message.scale = reader.int64(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.int32(); + break; + } + case 3: { + message.mode = reader.int32(); + break; + } + case 4: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); + break; + } + case 6: { + message.description = reader.string(); + break; + } + case 7: { + message.maxLength = reader.int64(); + break; + } + case 8: { + message.precision = reader.int64(); + break; + } + case 9: { + message.scale = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -9619,6 +10252,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableFieldSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + 
TableFieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableFieldSchema"; + }; + /** * Type enum. * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Type @@ -9786,9 +10434,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedSchema = reader.bytes(); - break; + case 1: { + message.serializedSchema = reader.bytes(); + break; + } default: reader.skipType(tag & 7); break; @@ -9845,7 +10494,7 @@ if (object.serializedSchema != null) if (typeof object.serializedSchema === "string") $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); - else if (object.serializedSchema.length) + else if (object.serializedSchema.length >= 0) message.serializedSchema = object.serializedSchema; return message; }; @@ -9887,6 +10536,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ArrowSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowSchema"; + }; + return ArrowSchema; })(); @@ -9993,12 +10657,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedRecordBatch = reader.bytes(); - break; - case 2: - message.rowCount = reader.int64(); - break; + case 1: { + message.serializedRecordBatch = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + 
break; + } default: reader.skipType(tag & 7); break; @@ -10058,7 +10724,7 @@ if (object.serializedRecordBatch != null) if (typeof object.serializedRecordBatch === "string") $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); - else if (object.serializedRecordBatch.length) + else if (object.serializedRecordBatch.length >= 0) message.serializedRecordBatch = object.serializedRecordBatch; if (object.rowCount != null) if ($util.Long) @@ -10120,6 +10786,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ArrowRecordBatch + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch"; + }; + return ArrowRecordBatch; })(); @@ -10215,9 +10896,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.schema = reader.string(); - break; + case 1: { + message.schema = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -10307,6 +10989,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AvroSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; 
+ } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroSchema"; + }; + return AvroSchema; })(); @@ -10413,12 +11110,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.serializedBinaryRows = reader.bytes(); - break; - case 2: - message.rowCount = reader.int64(); - break; + case 1: { + message.serializedBinaryRows = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -10478,7 +11177,7 @@ if (object.serializedBinaryRows != null) if (typeof object.serializedBinaryRows === "string") $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); - else if (object.serializedBinaryRows.length) + else if (object.serializedBinaryRows.length >= 0) message.serializedBinaryRows = object.serializedBinaryRows; if (object.rowCount != null) if ($util.Long) @@ -10540,6 +11239,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for AvroRows + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroRows"; + }; + return AvroRows; })(); @@ -10648,14 +11362,16 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); - break; - case 2: - message.rowRestriction = reader.string(); - break; + case 1: { + if (!(message.selectedFields && 
message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + } + case 2: { + message.rowRestriction = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -10766,6 +11482,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableReadOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableReadOptions"; + }; + return TableReadOptions; })(); @@ -10802,7 +11533,7 @@ }; /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#createReadSession}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @typedef CreateReadSessionCallback * @type {function} @@ -10835,7 +11566,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#readRows}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @typedef ReadRowsCallback * @type {function} @@ -10868,7 +11599,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#batchCreateReadSessionStreams}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. 
* @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @typedef BatchCreateReadSessionStreamsCallback * @type {function} @@ -10901,7 +11632,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#finalizeStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @typedef FinalizeStreamCallback * @type {function} @@ -10934,7 +11665,7 @@ */ /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage#splitReadStream}. + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage * @typedef SplitReadStreamCallback * @type {function} @@ -11061,9 +11792,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; + case 1: { + message.name = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -11153,6 +11885,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Stream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Stream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Stream"; + }; + return Stream; })(); @@ -11259,12 +12006,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - case 2: - message.offset = reader.int64(); - 
break; + case 1: { + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.offset = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -11382,6 +12131,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for StreamPosition + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamPosition.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamPosition"; + }; + return StreamPosition; })(); @@ -11570,32 +12334,40 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - case 5: - message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); - break; - case 6: - message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); - break; - case 4: - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); - break; - case 7: - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - case 8: - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); - break; - case 9: - message.shardingStrategy = reader.int32(); - break; + case 1: { + message.name = 
reader.string(); + break; + } + case 2: { + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 5: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 6: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + case 4: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + } + case 7: { + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + } + case 8: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + } + case 9: { + message.shardingStrategy = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -11816,6 +12588,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadSession + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadSession"; + }; + return ReadSession; })(); @@ -11977,29 +12764,36 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - case 6: - message.parent = reader.string(); - break; - case 2: - 
message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); - break; - case 3: - message.requestedStreams = reader.int32(); - break; - case 4: - message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); - break; - case 5: - message.format = reader.int32(); - break; - case 7: - message.shardingStrategy = reader.int32(); - break; - default: - reader.skipType(tag & 7); + case 1: { + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + } + case 6: { + message.parent = reader.string(); + break; + } + case 2: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + } + case 3: { + message.requestedStreams = reader.int32(); + break; + } + case 4: { + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); + break; + } + case 5: { + message.format = reader.int32(); + break; + } + case 7: { + message.shardingStrategy = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); break; } } @@ -12187,6 +12981,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for CreateReadSessionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest"; + }; + return CreateReadSessionRequest; })(); @@ -12314,9 +13123,10 @@ while (reader.pos < end) { var 
tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); - break; + case 1: { + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -12411,6 +13221,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsRequest"; + }; + return ReadRowsRequest; })(); @@ -12539,18 +13364,22 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.estimatedRowCount = reader.int64(); - break; - case 2: - message.fractionConsumed = reader.float(); - break; - case 4: - message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); - break; - case 3: - message.isSplittable = reader.bool(); - break; + case 1: { + message.estimatedRowCount = reader.int64(); + break; + } + case 2: { + message.fractionConsumed = reader.float(); + break; + } + case 4: { + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); + break; + } + case 3: { + message.isSplittable = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -12684,6 +13513,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for StreamStatus + * @function 
getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamStatus"; + }; + return StreamStatus; })(); @@ -12790,12 +13634,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.atResponseStart = reader.float(); - break; - case 2: - message.atResponseEnd = reader.float(); - break; + case 1: { + message.atResponseStart = reader.float(); + break; + } + case 2: { + message.atResponseEnd = reader.float(); + break; + } default: reader.skipType(tag & 7); break; @@ -12894,6 +13740,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Progress + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Progress"; + }; + return Progress; })(); @@ -12989,9 +13850,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.throttlePercent = reader.int32(); - break; + case 1: { + message.throttlePercent = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -13081,6 +13943,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ThrottleStatus + * @function getTypeUrl + * @memberof 
google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ThrottleStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ThrottleStatus"; + }; + return ThrottleStatus; })(); @@ -13234,21 +14111,26 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 3: - message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); - break; - case 4: - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); - break; - case 6: - message.rowCount = reader.int64(); - break; - case 2: - message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); - break; - case 5: - message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); - break; + case 3: { + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); + break; + } + case 4: { + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + } + case 6: { + message.rowCount = reader.int64(); + break; + } + case 2: { + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); + break; + } + case 5: { + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -13418,6 +14300,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReadRowsResponse + * @function 
getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"; + }; + return ReadRowsResponse; })(); @@ -13524,12 +14421,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); - break; - case 2: - message.requestedStreams = reader.int32(); - break; + case 1: { + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); + break; + } + case 2: { + message.requestedStreams = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -13633,6 +14532,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BatchCreateReadSessionStreamsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateReadSessionStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest"; + }; + return BatchCreateReadSessionStreamsRequest; })(); @@ -13730,11 +14644,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.streams && message.streams.length)) - 
message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); - break; + case 1: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -13841,6 +14756,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BatchCreateReadSessionStreamsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateReadSessionStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"; + }; + return BatchCreateReadSessionStreamsResponse; })(); @@ -13936,9 +14866,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; + case 2: { + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -14033,6 +14964,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FinalizeStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + 
FinalizeStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest"; + }; + return FinalizeStreamRequest; })(); @@ -14139,12 +15085,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - case 2: - message.fraction = reader.float(); - break; + case 1: { + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.fraction = reader.float(); + break; + } default: reader.skipType(tag & 7); break; @@ -14248,6 +15196,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for SplitReadStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest"; + }; + return SplitReadStreamRequest; })(); @@ -14354,12 +15317,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - case 2: - message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; + case 1: { + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, 
reader.uint32()); + break; + } + case 2: { + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -14468,6 +15433,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for SplitReadStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"; + }; + return SplitReadStreamResponse; })(); @@ -14585,15 +15565,18 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.projectId = reader.string(); - break; - case 2: - message.datasetId = reader.string(); - break; - case 3: - message.tableId = reader.string(); - break; + case 1: { + message.projectId = reader.string(); + break; + } + case 2: { + message.datasetId = reader.string(); + break; + } + case 3: { + message.tableId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -14700,6 +15683,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableReference + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + 
"/google.cloud.bigquery.storage.v1beta1.TableReference"; + }; + return TableReference; })(); @@ -14795,9 +15793,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; + case 1: { + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -14892,6 +15891,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for TableModifiers + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableModifiers"; + }; + return TableModifiers; })(); @@ -15010,11 +16024,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.file && message.file.length)) - message.file = []; - message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); - break; + case 1: { + if (!(message.file && message.file.length)) + message.file = []; + message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -15121,6 +16136,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FileDescriptorSet + * @function getTypeUrl + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The 
default type url + */ + FileDescriptorSet.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileDescriptorSet"; + }; + return FileDescriptorSet; })(); @@ -15142,6 +16172,7 @@ * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo * @property {string|null} [syntax] FileDescriptorProto syntax + * @property {string|null} [edition] FileDescriptorProto edition */ /** @@ -15262,6 +16293,14 @@ */ FileDescriptorProto.prototype.syntax = ""; + /** + * FileDescriptorProto edition. + * @member {string} edition + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.edition = ""; + /** * Creates a new FileDescriptorProto instance using the specified properties. * @function create @@ -15317,6 +16356,8 @@ writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); + if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) + writer.uint32(/* id 13, wireType 2 =*/106).string(message.edition); return writer; }; @@ -15351,66 +16392,82 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message["package"] = reader.string(); - break; - case 3: - if (!(message.dependency && message.dependency.length)) - message.dependency = []; - message.dependency.push(reader.string()); - break; - case 10: - if (!(message.publicDependency && message.publicDependency.length)) - message.publicDependency = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + case 1: { + message.name = 
reader.string(); + break; + } + case 2: { + message["package"] = reader.string(); + break; + } + case 3: { + if (!(message.dependency && message.dependency.length)) + message.dependency = []; + message.dependency.push(reader.string()); + break; + } + case 10: { + if (!(message.publicDependency && message.publicDependency.length)) + message.publicDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.publicDependency.push(reader.int32()); + } else message.publicDependency.push(reader.int32()); - } else - message.publicDependency.push(reader.int32()); - break; - case 11: - if (!(message.weakDependency && message.weakDependency.length)) - message.weakDependency = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + break; + } + case 11: { + if (!(message.weakDependency && message.weakDependency.length)) + message.weakDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.weakDependency.push(reader.int32()); + } else message.weakDependency.push(reader.int32()); - } else - message.weakDependency.push(reader.int32()); - break; - case 4: - if (!(message.messageType && message.messageType.length)) - message.messageType = []; - message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.enumType && message.enumType.length)) - message.enumType = []; - message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - case 6: - if (!(message.service && message.service.length)) - message.service = []; - message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); - break; - case 7: - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, 
reader.uint32())); - break; - case 8: - message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); - break; - case 9: - message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); - break; - case 12: - message.syntax = reader.string(); - break; + break; + } + case 4: { + if (!(message.messageType && message.messageType.length)) + message.messageType = []; + message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 6: { + if (!(message.service && message.service.length)) + message.service = []; + message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 7: { + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 8: { + message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); + break; + } + case 9: { + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); + break; + } + case 12: { + message.syntax = reader.string(); + break; + } + case 13: { + message.edition = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -15522,6 +16579,9 @@ if (message.syntax != null && message.hasOwnProperty("syntax")) if (!$util.isString(message.syntax)) return "syntax: string expected"; + if (message.edition != null && message.hasOwnProperty("edition")) + if (!$util.isString(message.edition)) + return "edition: string expected"; return null; }; @@ -15614,6 +16674,8 @@ } if (object.syntax != null) message.syntax = String(object.syntax); + if (object.edition != null) + 
message.edition = String(object.edition); return message; }; @@ -15645,6 +16707,7 @@ object.options = null; object.sourceCodeInfo = null; object.syntax = ""; + object.edition = ""; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -15691,6 +16754,8 @@ } if (message.syntax != null && message.hasOwnProperty("syntax")) object.syntax = message.syntax; + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = message.edition; return object; }; @@ -15705,6 +16770,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FileDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FileDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileDescriptorProto"; + }; + return FileDescriptorProto; })(); @@ -15915,52 +16995,62 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - if (!(message.field && message.field.length)) - message.field = []; - message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - case 6: - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - if (!(message.nestedType && message.nestedType.length)) - message.nestedType = []; - message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - case 4: - if (!(message.enumType && message.enumType.length)) - message.enumType = []; - 
message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.extensionRange && message.extensionRange.length)) - message.extensionRange = []; - message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); - break; - case 8: - if (!(message.oneofDecl && message.oneofDecl.length)) - message.oneofDecl = []; - message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); - break; - case 7: - message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); - break; - case 9: - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); - break; - case 10: - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - message.reservedName.push(reader.string()); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.field && message.field.length)) + message.field = []; + message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 6: { + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + if (!(message.nestedType && message.nestedType.length)) + message.nestedType = []; + message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + } + case 4: { + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.extensionRange && message.extensionRange.length)) + 
message.extensionRange = []; + message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); + break; + } + case 8: { + if (!(message.oneofDecl && message.oneofDecl.length)) + message.oneofDecl = []; + message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 7: { + message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); + break; + } + case 9: { + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); + break; + } + case 10: { + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -16261,6 +17351,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for DescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto"; + }; + DescriptorProto.ExtensionRange = (function() { /** @@ -16375,15 +17480,18 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.start = reader.int32(); - break; - case 2: - message.end = reader.int32(); - break; - case 3: - message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); - break; + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + 
break; + } + case 3: { + message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -16495,6 +17603,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ExtensionRange + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExtensionRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto.ExtensionRange"; + }; + return ExtensionRange; })(); @@ -16601,12 +17724,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.start = reader.int32(); - break; - case 2: - message.end = reader.int32(); - break; + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -16705,6 +17830,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ReservedRange + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto.ReservedRange"; + }; + return ReservedRange; })(); @@ -16805,11 +17945,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 999: - if (!(message.uninterpretedOption && 
message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -16916,6 +18057,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ExtensionRangeOptions + * @function getTypeUrl + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExtensionRangeOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ExtensionRangeOptions"; + }; + return ExtensionRangeOptions; })(); @@ -17121,39 +18277,50 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 3: - message.number = reader.int32(); - break; - case 4: - message.label = reader.int32(); - break; - case 5: - message.type = reader.int32(); - break; - case 6: - message.typeName = reader.string(); - break; - case 2: - message.extendee = reader.string(); - break; - case 7: - message.defaultValue = reader.string(); - break; - case 9: - message.oneofIndex = reader.int32(); - break; - case 10: - message.jsonName = reader.string(); - break; - case 8: - message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); - break; - case 17: - message.proto3Optional = reader.bool(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 3: { + 
message.number = reader.int32(); + break; + } + case 4: { + message.label = reader.int32(); + break; + } + case 5: { + message.type = reader.int32(); + break; + } + case 6: { + message.typeName = reader.string(); + break; + } + case 2: { + message.extendee = reader.string(); + break; + } + case 7: { + message.defaultValue = reader.string(); + break; + } + case 9: { + message.oneofIndex = reader.int32(); + break; + } + case 10: { + message.jsonName = reader.string(); + break; + } + case 8: { + message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); + break; + } + case 17: { + message.proto3Optional = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -17440,6 +18607,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FieldDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldDescriptorProto"; + }; + /** * Type enum. 
* @name google.protobuf.FieldDescriptorProto.Type @@ -17608,12 +18790,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -17717,6 +18901,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for OneofDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + OneofDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.OneofDescriptorProto"; + }; + return OneofDescriptorProto; })(); @@ -17862,27 +19061,32 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - if (!(message.value && message.value.length)) - message.value = []; - message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); - break; - case 4: - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); - break; - case 5: - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - 
message.reservedName.push(reader.string()); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.value && message.value.length)) + message.value = []; + message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); + break; + } + case 4: { + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -18058,6 +19262,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for EnumDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto"; + }; + EnumDescriptorProto.EnumReservedRange = (function() { /** @@ -18161,12 +19380,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.start = reader.int32(); - break; - case 2: - message.end = reader.int32(); - break; + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -18265,6 +19486,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + 
/** + * Gets the default type url for EnumReservedRange + * @function getTypeUrl + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto.EnumReservedRange"; + }; + return EnumReservedRange; })(); @@ -18385,15 +19621,18 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.number = reader.int32(); - break; - case 3: - message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.number = reader.int32(); + break; + } + case 3: { + message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -18505,6 +19744,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for EnumValueDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumValueDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumValueDescriptorProto"; + }; + return EnumValueDescriptorProto; })(); @@ -18624,17 +19878,20 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - 
case 2: - if (!(message.method && message.method.length)) - message.method = []; - message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); - break; - case 3: - message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.method && message.method.length)) + message.method = []; + message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -18764,6 +20021,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ServiceDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ServiceDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ServiceDescriptorProto"; + }; + return ServiceDescriptorProto; })(); @@ -18914,24 +20186,30 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.inputType = reader.string(); - break; - case 3: - message.outputType = reader.string(); - break; - case 4: - message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); - break; - case 5: - message.clientStreaming = reader.bool(); - break; - case 6: - message.serverStreaming = reader.bool(); - break; + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.inputType = reader.string(); + break; + } 
+ case 3: { + message.outputType = reader.string(); + break; + } + case 4: { + message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); + break; + } + case 5: { + message.clientStreaming = reader.bool(); + break; + } + case 6: { + message.serverStreaming = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -19067,6 +20345,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for MethodDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MethodDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.MethodDescriptorProto"; + }; + return MethodDescriptorProto; })(); @@ -19397,76 +20690,98 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.javaPackage = reader.string(); - break; - case 8: - message.javaOuterClassname = reader.string(); - break; - case 10: - message.javaMultipleFiles = reader.bool(); - break; - case 20: - message.javaGenerateEqualsAndHash = reader.bool(); - break; - case 27: - message.javaStringCheckUtf8 = reader.bool(); - break; - case 9: - message.optimizeFor = reader.int32(); - break; - case 11: - message.goPackage = reader.string(); - break; - case 16: - message.ccGenericServices = reader.bool(); - break; - case 17: - message.javaGenericServices = reader.bool(); - break; - case 18: - message.pyGenericServices = reader.bool(); - break; - case 42: - message.phpGenericServices = reader.bool(); - break; - case 23: - message.deprecated = reader.bool(); - break; - case 31: - message.ccEnableArenas = reader.bool(); - break; - case 36: - message.objcClassPrefix = 
reader.string(); - break; - case 37: - message.csharpNamespace = reader.string(); - break; - case 39: - message.swiftPrefix = reader.string(); - break; - case 40: - message.phpClassPrefix = reader.string(); - break; - case 41: - message.phpNamespace = reader.string(); - break; - case 44: - message.phpMetadataNamespace = reader.string(); - break; - case 45: - message.rubyPackage = reader.string(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1053: - if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) - message[".google.api.resourceDefinition"] = []; - message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); - break; + case 1: { + message.javaPackage = reader.string(); + break; + } + case 8: { + message.javaOuterClassname = reader.string(); + break; + } + case 10: { + message.javaMultipleFiles = reader.bool(); + break; + } + case 20: { + message.javaGenerateEqualsAndHash = reader.bool(); + break; + } + case 27: { + message.javaStringCheckUtf8 = reader.bool(); + break; + } + case 9: { + message.optimizeFor = reader.int32(); + break; + } + case 11: { + message.goPackage = reader.string(); + break; + } + case 16: { + message.ccGenericServices = reader.bool(); + break; + } + case 17: { + message.javaGenericServices = reader.bool(); + break; + } + case 18: { + message.pyGenericServices = reader.bool(); + break; + } + case 42: { + message.phpGenericServices = reader.bool(); + break; + } + case 23: { + message.deprecated = reader.bool(); + break; + } + case 31: { + message.ccEnableArenas = reader.bool(); + break; + } + case 36: { + message.objcClassPrefix = reader.string(); + break; + } + case 37: { + message.csharpNamespace = reader.string(); + break; + 
} + case 39: { + message.swiftPrefix = reader.string(); + break; + } + case 40: { + message.phpClassPrefix = reader.string(); + break; + } + case 41: { + message.phpNamespace = reader.string(); + break; + } + case 44: { + message.phpMetadataNamespace = reader.string(); + break; + } + case 45: { + message.rubyPackage = reader.string(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1053: { + if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) + message[".google.api.resourceDefinition"] = []; + message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -19779,6 +21094,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FileOptions + * @function getTypeUrl + * @memberof google.protobuf.FileOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FileOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileOptions"; + }; + /** * OptimizeMode enum. 
* @name google.protobuf.FileOptions.OptimizeMode @@ -19947,26 +21277,32 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.messageSetWireFormat = reader.bool(); - break; - case 2: - message.noStandardDescriptorAccessor = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 7: - message.mapEntry = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1053: - message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); - break; + case 1: { + message.messageSetWireFormat = reader.bool(); + break; + } + case 2: { + message.noStandardDescriptorAccessor = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 7: { + message.mapEntry = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1053: { + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -20120,6 +21456,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for MessageOptions + * @function getTypeUrl + * @memberof google.protobuf.MessageOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MessageOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; 
+ } + return typeUrlPrefix + "/google.protobuf.MessageOptions"; + }; + return MessageOptions; })(); @@ -20133,6 +21484,7 @@ * @property {boolean|null} [packed] FieldOptions packed * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype * @property {boolean|null} [lazy] FieldOptions lazy + * @property {boolean|null} [unverifiedLazy] FieldOptions unverifiedLazy * @property {boolean|null} [deprecated] FieldOptions deprecated * @property {boolean|null} [weak] FieldOptions weak * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption @@ -20189,6 +21541,14 @@ */ FieldOptions.prototype.lazy = false; + /** + * FieldOptions unverifiedLazy. + * @member {boolean} unverifiedLazy + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.unverifiedLazy = false; + /** * FieldOptions deprecated. * @member {boolean} deprecated @@ -20265,6 +21625,8 @@ writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); + if (message.unverifiedLazy != null && Object.hasOwnProperty.call(message, "unverifiedLazy")) + writer.uint32(/* id 15, wireType 0 =*/120).bool(message.unverifiedLazy); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -20310,42 +21672,55 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.ctype = reader.int32(); - break; - case 2: - message.packed = reader.bool(); - break; - case 6: - message.jstype = reader.int32(); - break; - case 5: - message.lazy = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 10: - message.weak = reader.bool(); - 
break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1052: - if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) - message[".google.api.fieldBehavior"] = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + case 1: { + message.ctype = reader.int32(); + break; + } + case 2: { + message.packed = reader.bool(); + break; + } + case 6: { + message.jstype = reader.int32(); + break; + } + case 5: { + message.lazy = reader.bool(); + break; + } + case 15: { + message.unverifiedLazy = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 10: { + message.weak = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1052: { + if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) + message[".google.api.fieldBehavior"] = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message[".google.api.fieldBehavior"].push(reader.int32()); + } else message[".google.api.fieldBehavior"].push(reader.int32()); - } else - message[".google.api.fieldBehavior"].push(reader.int32()); - break; - case 1055: - message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); - break; + break; + } + case 1055: { + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -20405,6 +21780,9 @@ if 
(message.lazy != null && message.hasOwnProperty("lazy")) if (typeof message.lazy !== "boolean") return "lazy: boolean expected"; + if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) + if (typeof message.unverifiedLazy !== "boolean") + return "unverifiedLazy: boolean expected"; if (message.deprecated != null && message.hasOwnProperty("deprecated")) if (typeof message.deprecated !== "boolean") return "deprecated: boolean expected"; @@ -20490,6 +21868,8 @@ } if (object.lazy != null) message.lazy = Boolean(object.lazy); + if (object.unverifiedLazy != null) + message.unverifiedLazy = Boolean(object.unverifiedLazy); if (object.deprecated != null) message.deprecated = Boolean(object.deprecated); if (object.weak != null) @@ -20577,6 +21957,7 @@ object.lazy = false; object.jstype = options.enums === String ? "JS_NORMAL" : 0; object.weak = false; + object.unverifiedLazy = false; object[".google.api.resourceReference"] = null; } if (message.ctype != null && message.hasOwnProperty("ctype")) @@ -20591,6 +21972,8 @@ object.jstype = options.enums === String ? 
$root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; if (message.weak != null && message.hasOwnProperty("weak")) object.weak = message.weak; + if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) + object.unverifiedLazy = message.unverifiedLazy; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -20617,6 +22000,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FieldOptions + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions"; + }; + /** * CType enum. 
* @name google.protobuf.FieldOptions.CType @@ -20746,11 +22144,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -20857,6 +22256,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for OneofOptions + * @function getTypeUrl + * @memberof google.protobuf.OneofOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + OneofOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.OneofOptions"; + }; + return OneofOptions; })(); @@ -20976,17 +22390,20 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - message.allowAlias = reader.bool(); - break; - case 3: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; + case 2: { + message.allowAlias = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = 
[]; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -21111,6 +22528,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for EnumOptions + * @function getTypeUrl + * @memberof google.protobuf.EnumOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumOptions"; + }; + return EnumOptions; })(); @@ -21219,14 +22651,16 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; + case 1: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -21342,6 +22776,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for EnumValueOptions + * @function getTypeUrl + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumValueOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if 
(typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumValueOptions"; + }; + return EnumValueOptions; })(); @@ -21472,20 +22921,24 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 33: - message.deprecated = reader.bool(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 1049: - message[".google.api.defaultHost"] = reader.string(); - break; - case 1050: - message[".google.api.oauthScopes"] = reader.string(); - break; + case 33: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1049: { + message[".google.api.defaultHost"] = reader.string(); + break; + } + case 1050: { + message[".google.api.oauthScopes"] = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -21618,6 +23071,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ServiceOptions + * @function getTypeUrl + * @memberof google.protobuf.ServiceOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ServiceOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ServiceOptions"; + }; + return ServiceOptions; })(); @@ -21761,25 +23229,30 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 33: - 
message.deprecated = reader.bool(); - break; - case 34: - message.idempotencyLevel = reader.int32(); - break; - case 999: - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - case 72295728: - message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); - break; - case 1051: - if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) - message[".google.api.methodSignature"] = []; - message[".google.api.methodSignature"].push(reader.string()); - break; + case 33: { + message.deprecated = reader.bool(); + break; + } + case 34: { + message.idempotencyLevel = reader.int32(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 72295728: { + message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); + break; + } + case 1051: { + if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) + message[".google.api.methodSignature"] = []; + message[".google.api.methodSignature"].push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -21956,6 +23429,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for MethodOptions + * @function getTypeUrl + * @memberof google.protobuf.MethodOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MethodOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = 
"type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.MethodOptions"; + }; + /** * IdempotencyLevel enum. * @name google.protobuf.MethodOptions.IdempotencyLevel @@ -22135,29 +23623,36 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 2: - if (!(message.name && message.name.length)) - message.name = []; - message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); - break; - case 3: - message.identifierValue = reader.string(); - break; - case 4: - message.positiveIntValue = reader.uint64(); - break; - case 5: - message.negativeIntValue = reader.int64(); - break; - case 6: - message.doubleValue = reader.double(); - break; - case 7: - message.stringValue = reader.bytes(); - break; - case 8: - message.aggregateValue = reader.string(); - break; + case 2: { + if (!(message.name && message.name.length)) + message.name = []; + message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); + break; + } + case 3: { + message.identifierValue = reader.string(); + break; + } + case 4: { + message.positiveIntValue = reader.uint64(); + break; + } + case 5: { + message.negativeIntValue = reader.int64(); + break; + } + case 6: { + message.doubleValue = reader.double(); + break; + } + case 7: { + message.stringValue = reader.bytes(); + break; + } + case 8: { + message.aggregateValue = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -22270,7 +23765,7 @@ if (object.stringValue != null) if (typeof object.stringValue === "string") $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); - else if (object.stringValue.length) + else if (object.stringValue.length >= 0) message.stringValue = object.stringValue; if (object.aggregateValue != null) message.aggregateValue = String(object.aggregateValue); @@ -22351,6 +23846,21 @@ return this.constructor.toObject(this, 
$protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for UninterpretedOption + * @function getTypeUrl + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UninterpretedOption.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UninterpretedOption"; + }; + UninterpretedOption.NamePart = (function() { /** @@ -22452,12 +23962,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.namePart = reader.string(); - break; - case 2: - message.isExtension = reader.bool(); - break; + case 1: { + message.namePart = reader.string(); + break; + } + case 2: { + message.isExtension = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -22558,6 +24070,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for NamePart + * @function getTypeUrl + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + NamePart.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UninterpretedOption.NamePart"; + }; + return NamePart; })(); @@ -22658,11 +24185,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.location && message.location.length)) - message.location = []; - message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); - break; + case 1: { + if (!(message.location && message.location.length)) + 
message.location = []; + message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -22769,6 +24297,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for SourceCodeInfo + * @function getTypeUrl + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SourceCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.SourceCodeInfo"; + }; + SourceCodeInfo.Location = (function() { /** @@ -22917,37 +24460,42 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + case 1: { + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - case 2: - if (!(message.span && message.span.length)) - message.span = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + break; + } + case 2: { + if (!(message.span && message.span.length)) + message.span = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.span.push(reader.int32()); + } else message.span.push(reader.int32()); - } else - message.span.push(reader.int32()); - break; - case 3: - message.leadingComments = reader.string(); - break; - case 4: - 
message.trailingComments = reader.string(); - break; - case 6: - if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) - message.leadingDetachedComments = []; - message.leadingDetachedComments.push(reader.string()); - break; + break; + } + case 3: { + message.leadingComments = reader.string(); + break; + } + case 4: { + message.trailingComments = reader.string(); + break; + } + case 6: { + if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) + message.leadingDetachedComments = []; + message.leadingDetachedComments.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -23108,6 +24656,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Location + * @function getTypeUrl + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Location.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.SourceCodeInfo.Location"; + }; + return Location; })(); @@ -23208,11 +24771,12 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.annotation && message.annotation.length)) - message.annotation = []; - message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); - break; + case 1: { + if (!(message.annotation && message.annotation.length)) + message.annotation = []; + message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -23319,6 +24883,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for 
GeneratedCodeInfo + * @function getTypeUrl + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + GeneratedCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo"; + }; + GeneratedCodeInfo.Annotation = (function() { /** @@ -23329,6 +24908,7 @@ * @property {string|null} [sourceFile] Annotation sourceFile * @property {number|null} [begin] Annotation begin * @property {number|null} [end] Annotation end + * @property {google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null} [semantic] Annotation semantic */ /** @@ -23379,6 +24959,14 @@ */ Annotation.prototype.end = 0; + /** + * Annotation semantic. + * @member {google.protobuf.GeneratedCodeInfo.Annotation.Semantic} semantic + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.semantic = 0; + /** * Creates a new Annotation instance using the specified properties. 
* @function create @@ -23415,6 +25003,8 @@ writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); + if (message.semantic != null && Object.hasOwnProperty.call(message, "semantic")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.semantic); return writer; }; @@ -23449,25 +25039,33 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + case 1: { + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - case 2: - message.sourceFile = reader.string(); - break; - case 3: - message.begin = reader.int32(); - break; - case 4: - message.end = reader.int32(); - break; + break; + } + case 2: { + message.sourceFile = reader.string(); + break; + } + case 3: { + message.begin = reader.int32(); + break; + } + case 4: { + message.end = reader.int32(); + break; + } + case 5: { + message.semantic = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -23519,6 +25117,15 @@ if (message.end != null && message.hasOwnProperty("end")) if (!$util.isInteger(message.end)) return "end: integer expected"; + if (message.semantic != null && message.hasOwnProperty("semantic")) + switch (message.semantic) { + default: + return "semantic: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -23547,6 +25154,20 @@ message.begin = object.begin | 0; if (object.end != null) message.end = object.end | 0; + switch (object.semantic) { + case "NONE": + case 0: + 
message.semantic = 0; + break; + case "SET": + case 1: + message.semantic = 1; + break; + case "ALIAS": + case 2: + message.semantic = 2; + break; + } return message; }; @@ -23569,6 +25190,7 @@ object.sourceFile = ""; object.begin = 0; object.end = 0; + object.semantic = options.enums === String ? "NONE" : 0; } if (message.path && message.path.length) { object.path = []; @@ -23581,6 +25203,8 @@ object.begin = message.begin; if (message.end != null && message.hasOwnProperty("end")) object.end = message.end; + if (message.semantic != null && message.hasOwnProperty("semantic")) + object.semantic = options.enums === String ? $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] : message.semantic; return object; }; @@ -23595,6 +25219,37 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Annotation + * @function getTypeUrl + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Annotation.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo.Annotation"; + }; + + /** + * Semantic enum. 
+ * @name google.protobuf.GeneratedCodeInfo.Annotation.Semantic + * @enum {number} + * @property {number} NONE=0 NONE value + * @property {number} SET=1 SET value + * @property {number} ALIAS=2 ALIAS value + */ + Annotation.Semantic = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "NONE"] = 0; + values[valuesById[1] = "SET"] = 1; + values[valuesById[2] = "ALIAS"] = 2; + return values; + })(); + return Annotation; })(); @@ -23704,12 +25359,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.seconds = reader.int64(); - break; - case 2: - message.nanos = reader.int32(); - break; + case 1: { + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -23822,6 +25479,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + return Timestamp; })(); @@ -23917,9 +25589,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.double(); - break; + case 1: { + message.value = reader.double(); + break; + } default: reader.skipType(tag & 7); break; @@ -24009,6 +25682,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for DoubleValue + * @function getTypeUrl + * @memberof google.protobuf.DoubleValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default 
"type.googleapis.com") + * @returns {string} The default type url + */ + DoubleValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DoubleValue"; + }; + return DoubleValue; })(); @@ -24104,9 +25792,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.float(); - break; + case 1: { + message.value = reader.float(); + break; + } default: reader.skipType(tag & 7); break; @@ -24196,6 +25885,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for FloatValue + * @function getTypeUrl + * @memberof google.protobuf.FloatValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FloatValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FloatValue"; + }; + return FloatValue; })(); @@ -24291,9 +25995,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.int64(); - break; + case 1: { + message.value = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -24397,6 +26102,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Int64Value + * @function getTypeUrl + * @memberof google.protobuf.Int64Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Int64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Int64Value"; + }; + return 
Int64Value; })(); @@ -24492,9 +26212,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.uint64(); - break; + case 1: { + message.value = reader.uint64(); + break; + } default: reader.skipType(tag & 7); break; @@ -24598,6 +26319,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for UInt64Value + * @function getTypeUrl + * @memberof google.protobuf.UInt64Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UInt64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UInt64Value"; + }; + return UInt64Value; })(); @@ -24693,9 +26429,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.int32(); - break; + case 1: { + message.value = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -24785,6 +26522,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Int32Value + * @function getTypeUrl + * @memberof google.protobuf.Int32Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Int32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Int32Value"; + }; + return Int32Value; })(); @@ -24880,9 +26632,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.uint32(); - break; + case 1: { + message.value = reader.uint32(); + break; + } default: reader.skipType(tag & 7); break; @@ 
-24972,6 +26725,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for UInt32Value + * @function getTypeUrl + * @memberof google.protobuf.UInt32Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UInt32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UInt32Value"; + }; + return UInt32Value; })(); @@ -25067,9 +26835,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.bool(); - break; + case 1: { + message.value = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -25159,6 +26928,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BoolValue + * @function getTypeUrl + * @memberof google.protobuf.BoolValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BoolValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.BoolValue"; + }; + return BoolValue; })(); @@ -25254,9 +27038,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.string(); - break; + case 1: { + message.value = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -25346,6 +27131,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for StringValue + * @function getTypeUrl + * @memberof google.protobuf.StringValue + * @static + * @param {string} [typeUrlPrefix] your custom 
typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StringValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.StringValue"; + }; + return StringValue; })(); @@ -25441,9 +27241,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.value = reader.bytes(); - break; + case 1: { + message.value = reader.bytes(); + break; + } default: reader.skipType(tag & 7); break; @@ -25500,7 +27301,7 @@ if (object.value != null) if (typeof object.value === "string") $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); - else if (object.value.length) + else if (object.value.length >= 0) message.value = object.value; return message; }; @@ -25542,6 +27343,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for BytesValue + * @function getTypeUrl + * @memberof google.protobuf.BytesValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BytesValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.BytesValue"; + }; + return BytesValue; })(); @@ -25648,12 +27464,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.type_url = reader.string(); - break; - case 2: - message.value = reader.bytes(); - break; + case 1: { + message.type_url = reader.string(); + break; + } + case 2: { + message.value = reader.bytes(); + break; + } default: reader.skipType(tag & 7); break; @@ -25715,7 +27533,7 @@ if (object.value != null) if (typeof object.value === "string") $util.base64.decode(object.value, 
message.value = $util.newBuffer($util.base64.length(object.value)), 0); - else if (object.value.length) + else if (object.value.length >= 0) message.value = object.value; return message; }; @@ -25761,6 +27579,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Any + * @function getTypeUrl + * @memberof google.protobuf.Any + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Any.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Any"; + }; + return Any; })(); @@ -25921,6 +27754,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Empty + * @function getTypeUrl + * @memberof google.protobuf.Empty + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Empty.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Empty"; + }; + return Empty; })(); @@ -26041,14 +27889,16 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (!(message.rules && message.rules.length)) - message.rules = []; - message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); - break; - case 2: - message.fullyDecodeReservedExpansion = reader.bool(); - break; + case 1: { + if (!(message.rules && message.rules.length)) + message.rules = []; + message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + } + case 2: { + message.fullyDecodeReservedExpansion = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -26164,6 +28014,21 
@@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Http + * @function getTypeUrl + * @memberof google.api.Http + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Http.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.Http"; + }; + return Http; })(); @@ -26374,38 +28239,48 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.selector = reader.string(); - break; - case 2: - message.get = reader.string(); - break; - case 3: - message.put = reader.string(); - break; - case 4: - message.post = reader.string(); - break; - case 5: - message["delete"] = reader.string(); - break; - case 6: - message.patch = reader.string(); - break; - case 8: - message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); - break; - case 7: - message.body = reader.string(); - break; - case 12: - message.responseBody = reader.string(); - break; - case 11: - if (!(message.additionalBindings && message.additionalBindings.length)) - message.additionalBindings = []; - message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); - break; + case 1: { + message.selector = reader.string(); + break; + } + case 2: { + message.get = reader.string(); + break; + } + case 3: { + message.put = reader.string(); + break; + } + case 4: { + message.post = reader.string(); + break; + } + case 5: { + message["delete"] = reader.string(); + break; + } + case 6: { + message.patch = reader.string(); + break; + } + case 8: { + message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); + break; + } + case 7: { + message.body = reader.string(); + break; + } + case 12: { + message.responseBody = reader.string(); + 
break; + } + case 11: { + if (!(message.additionalBindings && message.additionalBindings.length)) + message.additionalBindings = []; + message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -26627,6 +28502,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for HttpRule + * @function getTypeUrl + * @memberof google.api.HttpRule + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + HttpRule.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.HttpRule"; + }; + return HttpRule; })(); @@ -26733,12 +28623,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.kind = reader.string(); - break; - case 2: - message.path = reader.string(); - break; + case 1: { + message.kind = reader.string(); + break; + } + case 2: { + message.path = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -26837,6 +28729,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for CustomHttpPattern + * @function getTypeUrl + * @memberof google.api.CustomHttpPattern + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CustomHttpPattern.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.CustomHttpPattern"; + }; + return CustomHttpPattern; })(); @@ -27031,36 +28938,43 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.type = 
reader.string(); - break; - case 2: - if (!(message.pattern && message.pattern.length)) - message.pattern = []; - message.pattern.push(reader.string()); - break; - case 3: - message.nameField = reader.string(); - break; - case 4: - message.history = reader.int32(); - break; - case 5: - message.plural = reader.string(); - break; - case 6: - message.singular = reader.string(); - break; - case 10: - if (!(message.style && message.style.length)) - message.style = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) + case 1: { + message.type = reader.string(); + break; + } + case 2: { + if (!(message.pattern && message.pattern.length)) + message.pattern = []; + message.pattern.push(reader.string()); + break; + } + case 3: { + message.nameField = reader.string(); + break; + } + case 4: { + message.history = reader.int32(); + break; + } + case 5: { + message.plural = reader.string(); + break; + } + case 6: { + message.singular = reader.string(); + break; + } + case 10: { + if (!(message.style && message.style.length)) + message.style = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.style.push(reader.int32()); + } else message.style.push(reader.int32()); - } else - message.style.push(reader.int32()); - break; + break; + } default: reader.skipType(tag & 7); break; @@ -27258,6 +29172,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ResourceDescriptor + * @function getTypeUrl + * @memberof google.api.ResourceDescriptor + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ResourceDescriptor.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.ResourceDescriptor"; + }; + /** * History 
enum. * @name google.api.ResourceDescriptor.History @@ -27394,12 +29323,14 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.type = reader.string(); - break; - case 2: - message.childType = reader.string(); - break; + case 1: { + message.type = reader.string(); + break; + } + case 2: { + message.childType = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -27498,6 +29429,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for ResourceReference + * @function getTypeUrl + * @memberof google.api.ResourceReference + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ResourceReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.ResourceReference"; + }; + return ResourceReference; })(); @@ -27629,17 +29575,20 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.code = reader.int32(); - break; - case 2: - message.message = reader.string(); - break; - case 3: - if (!(message.details && message.details.length)) - message.details = []; - message.details.push($root.google.protobuf.Any.decode(reader, reader.uint32())); - break; + case 1: { + message.code = reader.int32(); + break; + } + case 2: { + message.message = reader.string(); + break; + } + case 3: { + if (!(message.details && message.details.length)) + message.details = []; + message.details.push($root.google.protobuf.Any.decode(reader, reader.uint32())); + break; + } default: reader.skipType(tag & 7); break; @@ -27764,6 +29713,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for Status + * @function getTypeUrl + * @memberof google.rpc.Status + * 
@static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Status.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.rpc.Status"; + }; + return Status; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 389ea6f3f92..e2fa6466986 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1591,6 +1591,10 @@ "syntax": { "type": "string", "id": 12 + }, + "edition": { + "type": "string", + "id": 13 } } }, @@ -2119,6 +2123,13 @@ "default": false } }, + "unverifiedLazy": { + "type": "bool", + "id": 15, + "options": { + "default": false + } + }, "deprecated": { "type": "bool", "id": 3, @@ -2411,6 +2422,19 @@ "end": { "type": "int32", "id": 4 + }, + "semantic": { + "type": "Semantic", + "id": 5 + } + }, + "nested": { + "Semantic": { + "values": { + "NONE": 0, + "SET": 1, + "ALIAS": 2 + } } } } From e636fb2053874595f33876705178ec26a5c30a9a Mon Sep 17 00:00:00 2001 From: shollyman Date: Tue, 23 Aug 2022 13:23:43 -0700 Subject: [PATCH 190/333] docs: augment write API sample to add more types (#288) * docs: augment write API sample to add more types This PR adds some missing functionality to the appending data example for node, namely to demonstrate writing complex messages with nested message types. It augments the sample data proto to be in line with other languages and adds the testing to ensure that complex types are propagated through to query results. 
--- handwritten/bigquery-storage/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 40dfb1f6162..a83b819122b 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -189,7 +189,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | | Customer_record_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/customer_record_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/customer_record_pb.js,samples/README.md) | | BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | -| Sample_data_pb2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb2.js,samples/README.md) | +| Sample_data_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb.js,samples/README.md) | From ab4f2d6aab369718fa9d4f1ce6890d6026108a25 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 23 Aug 2022 21:26:13 +0000 Subject: [PATCH 191/333] fix: remove pip install statements (#1546) (#290) because the tools are already installed in the docker image as of https://github.com/googleapis/testing-infra-docker/pull/227 Source-Link: https://github.com/googleapis/synthtool/commit/ab7384ea1c30df8ec2e175566ef2508e6c3a2acb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:bb493bf01d28519e82ab61c490c20122c85a7119c03a978ad0c34b4239fbad15 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/.kokoro/publish.sh | 1 - handwritten/bigquery-storage/.kokoro/release/docs.sh | 1 - 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index f7c796c60cd..748836981e2 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:74ab2b3c71ef27e6d8b69b1d0a0c9d31447777b79ac3cd4be82c265b45f37e5e -# created: 2022-08-22T22:07:00.791732705Z + digest: sha256:bb493bf01d28519e82ab61c490c20122c85a7119c03a978ad0c34b4239fbad15 +# created: 2022-08-23T18:40:55.597313991Z diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index 77a5defb2b5..949e3e1d0c2 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -19,7 +19,6 @@ set -eo pipefail export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Start the releasetool reporter -python3 -m pip install gcp-releasetool python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script cd $(dirname $0)/.. diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh index 4c866c86000..1d8f3f490a5 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs.sh @@ -29,7 +29,6 @@ npm run docs # create docs.metadata, based on package.json and .repo-metadata.json. 
npm i json@9.0.6 -g -python3 -m pip install --user gcp-docuploader python3 -m docuploader create-metadata \ --name=$(cat .repo-metadata.json | json name) \ --version=$(cat package.json | json version) \ From e5495e90d1ab597696eb35d03b6895f8cf52c1e7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 23 Aug 2022 15:02:12 -0700 Subject: [PATCH 192/333] chore(main): release 3.1.0 (#285) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 3.1.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 14 ++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ..._metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...data.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 5d9719cffc8..5e48ed039f2 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [3.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.0.0...v3.1.0) (2022-08-23) + + +### Features + +* allow users to set Apache Avro output format options through avro_serialization_options param in TableReadOptions message ([#284](https://github.com/googleapis/nodejs-bigquery-storage/issues/284)) ([99b8afc](https://github.com/googleapis/nodejs-bigquery-storage/commit/99b8afc3fb2aa1d47151b90924eab2016432034f)) + + +### Bug Fixes + +* better support for fallback mode ([#287](https://github.com/googleapis/nodejs-bigquery-storage/issues/287)) 
([08b0bb2](https://github.com/googleapis/nodejs-bigquery-storage/commit/08b0bb2c300ce49a65121805ea674e9c56726a87)) +* change import long to require ([#289](https://github.com/googleapis/nodejs-bigquery-storage/issues/289)) ([63a3dc2](https://github.com/googleapis/nodejs-bigquery-storage/commit/63a3dc2bcbac775e8c41dd19248ef3cd4829c21f)) +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-bigquery-storage/issues/1546)) ([#290](https://github.com/googleapis/nodejs-bigquery-storage/issues/290)) ([1436388](https://github.com/googleapis/nodejs-bigquery-storage/commit/143638862040327e89c74c87a7018e2342576a95)) + ## [3.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v2.8.0...v3.0.0) (2022-06-29) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 367d3085b85..cefd5cc432c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.0.0", + "version": "3.1.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 1036b356fbb..e20ae7d6385 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.0.0", + "version": "3.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index ac58a9c66c9..01be75591a9 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.0.0", + "version": "3.1.0", "language": "TYPESCRIPT", "apis": [ { From 04307f94a3ec081b7344f349157e090c05ca6f7c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 27 Aug 2022 05:08:18 +0000 Subject: [PATCH 193/333] fix: do not import the whole google-gax from proto JS (#1553) (#291) fix: use google-gax v3.3.0 Source-Link: https://github.com/googleapis/synthtool/commit/c73d112a11a1f1a93efa67c50495c19aa3a88910 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:b15a6f06cc06dcffa11e1bebdf1a74b6775a134aac24a0f86f51ddf728eb373e --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/package.json | 2 +- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 748836981e2..4d586c42063 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:bb493bf01d28519e82ab61c490c20122c85a7119c03a978ad0c34b4239fbad15 -# created: 2022-08-23T18:40:55.597313991Z + digest: sha256:b15a6f06cc06dcffa11e1bebdf1a74b6775a134aac24a0f86f51ddf728eb373e +# created: 2022-08-26T22:34:55.905845397Z diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index cefd5cc432c..38b40127abd 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,7 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^3.0.1" + "google-gax": "^3.3.0" }, "devDependencies": { "@types/mocha": "^9.0.0", diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 2cfa3409b32..6dcd8c08147 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -13,7 +13,7 @@ // limitations under the License. import Long = require("long"); -import {protobuf as $protobuf} from "google-gax"; +import type {protobuf as $protobuf} from "google-gax"; /** Namespace google. 
*/ export namespace google { diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 542a50d85f2..156259548cb 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -19,7 +19,7 @@ define(["protobufjs/minimal"], factory); /* CommonJS */ else if (typeof require === 'function' && typeof module === 'object' && module && module.exports) - module.exports = factory(require("google-gax").protobufMinimal); + module.exports = factory(require("google-gax/build/src/protobuf").protobufMinimal); })(this, function($protobuf) { "use strict"; From c5738f82680bf3c28763d86068faae01fdfe8a93 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 20:46:26 +0000 Subject: [PATCH 194/333] fix: allow passing gax instance to client constructor (#292) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 470911839 Source-Link: https://github.com/googleapis/googleapis/commit/352756699ebc5b2144c252867c265ea44448712e Source-Link: https://github.com/googleapis/googleapis-gen/commit/f16a1d224f00a630ea43d6a9a1a31f566f45cdea Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjE2YTFkMjI0ZjAwYTYzMGVhNDNkNmE5YTFhMzFmNTY2ZjQ1Y2RlYSJ9 feat: accept google-gax instance as a parameter Please see the documentation of the client constructor for details. 
PiperOrigin-RevId: 470332808 Source-Link: https://github.com/googleapis/googleapis/commit/d4a23675457cd8f0b44080e0594ec72de1291b89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e97a1ac204ead4fe7341f91e72db7c6ac6016341 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTk3YTFhYzIwNGVhZDRmZTczNDFmOTFlNzJkYjdjNmFjNjAxNjM0MSJ9 --- .../src/v1/big_query_read_client.ts | 40 +++++++++++------ .../src/v1/big_query_write_client.ts | 44 ++++++++++++------- .../src/v1beta1/big_query_storage_client.ts | 44 ++++++++++++------- 3 files changed, 85 insertions(+), 43 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 81c81a81d68..7ba3e1e633e 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -17,15 +17,13 @@ // ** All changes to this file may be overwritten. ** /* global window */ -import * as gax from 'google-gax'; -import { +import type * as gax from 'google-gax'; +import type { Callback, CallOptions, Descriptors, ClientOptions, - GoogleError, } from 'google-gax'; - import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); @@ -35,7 +33,6 @@ import jsonProtos = require('../../protos/protos.json'); * This file defines retry strategy and timeouts for all API methods in this library. */ import * as gapicConfig from './big_query_read_client_config.json'; - const version = require('../../../package.json').version; /** @@ -97,8 +94,18 @@ export class BigQueryReadClient { * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. 
Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryReadClient({fallback: 'rest'}, gax); + * ``` */ - constructor(opts?: ClientOptions) { + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryReadClient; const servicePath = @@ -118,8 +125,13 @@ export class BigQueryReadClient { opts['scopes'] = staticMembers.scopes; } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); @@ -184,7 +196,7 @@ export class BigQueryReadClient { // Provide descriptors for these. this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.SERVER_STREAMING, + this._gaxModule.StreamType.SERVER_STREAMING, opts.fallback === 'rest' ), }; @@ -203,7 +215,7 @@ export class BigQueryReadClient { this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; + this.warn = this._gaxModule.warn; } /** @@ -253,7 +265,9 @@ export class BigQueryReadClient { setImmediate(() => { stream.emit( 'error', - new GoogleError('The client has already been closed.') + new this._gaxModule.GoogleError( + 'The client has already been closed.' 
+ ) ); }); return stream; @@ -470,7 +484,7 @@ export class BigQueryReadClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'read_session.table': request.readSession!.table || '', }); this.initialize(); @@ -586,7 +600,7 @@ export class BigQueryReadClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -629,7 +643,7 @@ export class BigQueryReadClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ read_stream: request.readStream || '', }); this.initialize(); diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 02021a6bd26..10a5b597c6f 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -17,15 +17,13 @@ // ** All changes to this file may be overwritten. ** /* global window */ -import * as gax from 'google-gax'; -import { +import type * as gax from 'google-gax'; +import type { Callback, CallOptions, Descriptors, ClientOptions, - GoogleError, } from 'google-gax'; - import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); @@ -35,7 +33,6 @@ import jsonProtos = require('../../protos/protos.json'); * This file defines retry strategy and timeouts for all API methods in this library. 
*/ import * as gapicConfig from './big_query_write_client_config.json'; - const version = require('../../../package.json').version; /** @@ -100,8 +97,18 @@ export class BigQueryWriteClient { * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryWriteClient({fallback: 'rest'}, gax); + * ``` */ - constructor(opts?: ClientOptions) { + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryWriteClient; const servicePath = @@ -121,8 +128,13 @@ export class BigQueryWriteClient { opts['scopes'] = staticMembers.scopes; } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); @@ -187,7 +199,7 @@ export class BigQueryWriteClient { // Provide descriptors for these. 
this.descriptors.stream = { appendRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.BIDI_STREAMING, + this._gaxModule.StreamType.BIDI_STREAMING, opts.fallback === 'rest' ), }; @@ -206,7 +218,7 @@ export class BigQueryWriteClient { this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; + this.warn = this._gaxModule.warn; } /** @@ -259,7 +271,9 @@ export class BigQueryWriteClient { setImmediate(() => { stream.emit( 'error', - new GoogleError('The client has already been closed.') + new this._gaxModule.GoogleError( + 'The client has already been closed.' + ) ); }); return stream; @@ -444,7 +458,7 @@ export class BigQueryWriteClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -542,7 +556,7 @@ export class BigQueryWriteClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -641,7 +655,7 @@ export class BigQueryWriteClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -746,7 +760,7 @@ export class BigQueryWriteClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: 
request.parent || '', }); this.initialize(); @@ -851,7 +865,7 @@ export class BigQueryWriteClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ write_stream: request.writeStream || '', }); this.initialize(); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index a6a6d6c9596..9b545b1af42 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -17,15 +17,13 @@ // ** All changes to this file may be overwritten. ** /* global window */ -import * as gax from 'google-gax'; -import { +import type * as gax from 'google-gax'; +import type { Callback, CallOptions, Descriptors, ClientOptions, - GoogleError, } from 'google-gax'; - import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); @@ -35,7 +33,6 @@ import jsonProtos = require('../../protos/protos.json'); * This file defines retry strategy and timeouts for all API methods in this library. */ import * as gapicConfig from './big_query_storage_client_config.json'; - const version = require('../../../package.json').version; /** @@ -97,8 +94,18 @@ export class BigQueryStorageClient { * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. 
Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryStorageClient({fallback: 'rest'}, gax); + * ``` */ - constructor(opts?: ClientOptions) { + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryStorageClient; const servicePath = @@ -118,8 +125,13 @@ export class BigQueryStorageClient { opts['scopes'] = staticMembers.scopes; } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); @@ -178,7 +190,7 @@ export class BigQueryStorageClient { // Provide descriptors for these. this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( - gax.StreamType.SERVER_STREAMING, + this._gaxModule.StreamType.SERVER_STREAMING, opts.fallback === 'rest' ), }; @@ -197,7 +209,7 @@ export class BigQueryStorageClient { this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; + this.warn = this._gaxModule.warn; } /** @@ -250,7 +262,9 @@ export class BigQueryStorageClient { setImmediate(() => { stream.emit( 'error', - new GoogleError('The client has already been closed.') + new this._gaxModule.GoogleError( + 'The client has already been closed.' 
+ ) ); }); return stream; @@ -458,7 +472,7 @@ export class BigQueryStorageClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'table_reference.project_id': request.tableReference!.projectId || '', 'table_reference.dataset_id': request.tableReference!.datasetId || '', }); @@ -563,7 +577,7 @@ export class BigQueryStorageClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'session.name': request.session!.name || '', }); this.initialize(); @@ -677,7 +691,7 @@ export class BigQueryStorageClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'stream.name': request.stream!.name || '', }); this.initialize(); @@ -794,7 +808,7 @@ export class BigQueryStorageClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'original_stream.name': request.originalStream!.name || '', }); this.initialize(); @@ -837,7 +851,7 @@ export class BigQueryStorageClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ 'read_position.stream.name': request.readPosition!.stream!.name || '', }); this.initialize(); From 11ea0df58f47a3bd1a535ec99a3b89550ec9dc85 
Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 18:01:53 -0400 Subject: [PATCH 195/333] chore(main): release 3.1.1 (#293) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 3.1.1 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- ...snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 5e48ed039f2..33229aabf90 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [3.1.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.0...v3.1.1) (2022-09-01) + + +### Bug Fixes + +* Allow passing gax instance to client constructor ([#292](https://github.com/googleapis/nodejs-bigquery-storage/issues/292)) ([89f953d](https://github.com/googleapis/nodejs-bigquery-storage/commit/89f953de24d774de076ce9aeef649122ab3d65a6)) +* Do not import the whole google-gax from proto JS ([#1553](https://github.com/googleapis/nodejs-bigquery-storage/issues/1553)) ([#291](https://github.com/googleapis/nodejs-bigquery-storage/issues/291)) ([507e378](https://github.com/googleapis/nodejs-bigquery-storage/commit/507e3780553fa339ffccbba9a8f9ac930d1e9c6d)) + ## [3.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.0.0...v3.1.0) (2022-08-23) diff --git a/handwritten/bigquery-storage/package.json 
b/handwritten/bigquery-storage/package.json index 38b40127abd..0453492655c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.1.0", + "version": "3.1.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index e20ae7d6385..6ff22b675bd 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.1.0", + "version": "3.1.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 01be75591a9..3e4305f2828 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.1.0", + "version": "3.1.1", "language": "TYPESCRIPT", "apis": [ { From 21d4cbde0712d2297b45fd429416aff0b262ea28 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Sep 2022 13:14:14 -0700 Subject: [PATCH 196/333] feat: add location to WriteStream and add WriteStreamView support (#295) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add location to WriteStream and add WriteStreamView support PiperOrigin-RevId: 472835596 Source-Link: https://github.com/googleapis/googleapis/commit/1cf9407d5a7c889b7b30bd8a8ea4b67275943ac9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/59d0f10badec867913ebc2eb4bac021f79904519 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTlkMGYxMGJhZGVjODY3OTEzZWJjMmViNGJhYzAyMWY3OTkwNDUxOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add proto annotation for non-ascii field mapping PiperOrigin-RevId: 473815595 Source-Link: https://github.com/googleapis/googleapis/commit/46cfc4c8d90cad2a878bac4f15aa5c8683374ec3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/955ca31d81100d81ed7e0ae0522358ac45d53cc9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTU1Y2EzMWQ4MTEwMGQ4MWVkN2UwYWUwNTIyMzU4YWM0NWQ1M2NjOSJ9 * chore: use gapic-generator-typescript v2.17.0 PiperOrigin-RevId: 474338479 Source-Link: https://github.com/googleapis/googleapis/commit/d5d35e0353b59719e8917103b1bc7df2782bf6ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/efcd3f93962a103f68f003e2a1eecde6fa216a27 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZWZjZDNmOTM5NjJhMTAzZjY4ZjAwM2UyYTFlZWNkZTZmYTIxNmEyNyJ9 * test: use fully qualified request type name in tests PiperOrigin-RevId: 475685359 Source-Link: https://github.com/googleapis/googleapis/commit/7a129736313ceb1f277c3b7f7e16d2e04cc901dd Source-Link: https://github.com/googleapis/googleapis-gen/commit/370c729e2ba062a167449c27882ba5f379c5c34d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzcwYzcyOWUyYmEwNjJhMTY3NDQ5YzI3ODgyYmE1ZjM3OWM1YzM0ZCJ9 * fix: regenerated locally * fix(deps): use google-gax v3.5.2 Co-authored-by: Owl Bot Co-authored-by: Alexander Fenster --- 
handwritten/bigquery-storage/package.json | 4 +- .../bigquery/storage/v1/annotations.proto | 28 + .../cloud/bigquery/storage/v1/storage.proto | 4 + .../cloud/bigquery/storage/v1/stream.proto | 22 + .../bigquery-storage/protos/protos.d.ts | 24 +- handwritten/bigquery-storage/protos/protos.js | 276 +++++++++- .../bigquery-storage/protos/protos.json | 33 +- .../v1/big_query_write.get_write_stream.js | 5 + ...data.google.cloud.bigquery.storage.v1.json | 6 +- .../src/v1/big_query_read_client.ts | 6 +- .../src/v1/big_query_read_proto_list.json | 1 + .../src/v1/big_query_write_client.ts | 13 +- .../src/v1/big_query_write_proto_list.json | 1 + .../src/v1beta1/big_query_storage_client.ts | 12 +- .../test/gapic_big_query_read_v1.ts | 265 +++++---- .../test/gapic_big_query_storage_v1beta1.ts | 516 ++++++++++-------- .../test/gapic_big_query_write_v1.ts | 466 ++++++++-------- 17 files changed, 1073 insertions(+), 609 deletions(-) create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 0453492655c..a74cc2991f9 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,7 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^3.3.0" + "google-gax": "^3.5.2" }, "devDependencies": { "@types/mocha": "^9.0.0", @@ -44,7 +44,7 @@ "pack-n-play": "^1.0.0-2", "sinon": "^14.0.0", "ts-loader": "^9.0.0", - "typescript": "^4.6.4", + "typescript": "^4.8.3", "webpack": "^5.0.0", "webpack-cli": "^4.0.0" }, diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto new file mode 100644 index 00000000000..1627fd12a0c --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto @@ -0,0 +1,28 @@ +syntax = 
"proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +extend google.protobuf.FieldOptions { + // Setting the column_name extension allows users to reference + // bigquery column independently of the field name in the protocol buffer + // message. + // + // The intended use of this annotation is to reference a destination column + // named using characters unavailable for protobuf field names (e.g. unicode + // characters). + // + // More details about BigQuery naming limitations can be found here: + // https://cloud.google.com/bigquery/docs/schemas#column_names + // + // This extension is currently experimental. + optional string column_name = 454943157; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index e0b25c1afef..a49e1a389c2 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -524,6 +524,10 @@ message GetWriteStreamRequest { type: "bigquerystorage.googleapis.com/WriteStream" } ]; + + // Indicates whether to get full or partial view of the WriteStream. If + // not set, view returned will be basic. + WriteStreamView view = 3; } // Request message for `BatchCommitWriteStreams`. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 3735c73d670..fe71adfa6b7 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -203,6 +203,23 @@ message ReadStream { string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } +// WriteStreamView is a view enum that controls what details about a write +// stream should be returned. +enum WriteStreamView { + // The default / unset value. + WRITE_STREAM_VIEW_UNSPECIFIED = 0; + + // The BASIC projection returns basic metadata about a write stream. The + // basic view does not include schema information. This is the default view + // returned by GetWriteStream. + BASIC = 1; + + // The FULL projection returns all available write stream metadata, including + // the schema. CreateWriteStream returns the full projection of write stream + // metadata. + FULL = 2; +} + // Information about a single stream that gets data inside the storage system. message WriteStream { option (google.api.resource) = { @@ -261,4 +278,9 @@ message WriteStream { // Immutable. Mode of the stream. WriteMode write_mode = 7 [(google.api.field_behavior) = IMMUTABLE]; + + // Immutable. The geographic location where the stream's dataset resides. See + // https://cloud.google.com/bigquery/docs/locations for supported + // locations. + string location = 8 [(google.api.field_behavior) = IMMUTABLE]; } diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 6dcd8c08147..7d4eae00205 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import Long = require("long"); import type {protobuf as $protobuf} from "google-gax"; +import Long = require("long"); /** Namespace google. */ export namespace google { @@ -2483,6 +2483,9 @@ export namespace google { /** GetWriteStreamRequest name */ name?: (string|null); + + /** GetWriteStreamRequest view */ + view?: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView|null); } /** Represents a GetWriteStreamRequest. */ @@ -2497,6 +2500,9 @@ export namespace google { /** GetWriteStreamRequest name. */ public name: string; + /** GetWriteStreamRequest view. */ + public view: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView); + /** * Creates a new GetWriteStreamRequest instance using the specified properties. * @param [properties] Properties to set @@ -3901,6 +3907,13 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** WriteStreamView enum. */ + enum WriteStreamView { + WRITE_STREAM_VIEW_UNSPECIFIED = 0, + BASIC = 1, + FULL = 2 + } + /** Properties of a WriteStream. */ interface IWriteStream { @@ -3921,6 +3934,9 @@ export namespace google { /** WriteStream writeMode */ writeMode?: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null); + + /** WriteStream location */ + location?: (string|null); } /** Represents a WriteStream. */ @@ -3950,6 +3966,9 @@ export namespace google { /** WriteStream writeMode. */ public writeMode: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode); + /** WriteStream location. */ + public location: string; + /** * Creates a new WriteStream instance using the specified properties. 
* @param [properties] Properties to set @@ -8663,6 +8682,9 @@ export namespace google { /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + /** FieldOptions .google.cloud.bigquery.storage.v1.columnName */ + ".google.cloud.bigquery.storage.v1.columnName"?: (string|null); + /** FieldOptions .google.api.fieldBehavior */ ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 156259548cb..53da7298f1a 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -693,6 +693,12 @@ return object; var message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); switch (object.bufferCompression) { + default: + if (typeof object.bufferCompression === "number") { + message.bufferCompression = object.bufferCompression; + break; + } + break; case "COMPRESSION_UNSPECIFIED": case 0: message.bufferCompression = 0; @@ -725,7 +731,7 @@ if (options.defaults) object.bufferCompression = options.enums === String ? "COMPRESSION_UNSPECIFIED" : 0; if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) - object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; + object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] === undefined ? 
message.bufferCompression : $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; return object; }; @@ -5631,6 +5637,7 @@ * @memberof google.cloud.bigquery.storage.v1 * @interface IGetWriteStreamRequest * @property {string|null} [name] GetWriteStreamRequest name + * @property {google.cloud.bigquery.storage.v1.WriteStreamView|null} [view] GetWriteStreamRequest view */ /** @@ -5656,6 +5663,14 @@ */ GetWriteStreamRequest.prototype.name = ""; + /** + * GetWriteStreamRequest view. + * @member {google.cloud.bigquery.storage.v1.WriteStreamView} view + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @instance + */ + GetWriteStreamRequest.prototype.view = 0; + /** * Creates a new GetWriteStreamRequest instance using the specified properties. * @function create @@ -5682,6 +5697,8 @@ writer = $Writer.create(); if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.view != null && Object.hasOwnProperty.call(message, "view")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.view); return writer; }; @@ -5720,6 +5737,10 @@ message.name = reader.string(); break; } + case 3: { + message.view = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -5758,6 +5779,15 @@ if (message.name != null && message.hasOwnProperty("name")) if (!$util.isString(message.name)) return "name: string expected"; + if (message.view != null && message.hasOwnProperty("view")) + switch (message.view) { + default: + return "view: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -5775,6 +5805,26 @@ var message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); if (object.name != null) message.name = String(object.name); + switch (object.view) { + default: + if (typeof object.view === "number") { + message.view = object.view; + 
break; + } + break; + case "WRITE_STREAM_VIEW_UNSPECIFIED": + case 0: + message.view = 0; + break; + case "BASIC": + case 1: + message.view = 1; + break; + case "FULL": + case 2: + message.view = 2; + break; + } return message; }; @@ -5791,10 +5841,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.name = ""; + object.view = options.enums === String ? "WRITE_STREAM_VIEW_UNSPECIFIED" : 0; + } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; + if (message.view != null && message.hasOwnProperty("view")) + object.view = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] === undefined ? message.view : $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] : message.view; return object; }; @@ -7391,6 +7445,12 @@ return object; var message = new $root.google.cloud.bigquery.storage.v1.StorageError(); switch (object.code) { + default: + if (typeof object.code === "number") { + message.code = object.code; + break; + } + break; case "STORAGE_ERROR_CODE_UNSPECIFIED": case 0: message.code = 0; @@ -7458,7 +7518,7 @@ object.errorMessage = ""; } if (message.code != null && message.hasOwnProperty("code")) - object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] : message.code; + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] === undefined ? 
message.code : $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] : message.code; if (message.entity != null && message.hasOwnProperty("entity")) object.entity = message.entity; if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) @@ -7725,6 +7785,12 @@ else if (typeof object.index === "object") message.index = new $util.LongBits(object.index.low >>> 0, object.index.high >>> 0).toNumber(); switch (object.code) { + default: + if (typeof object.code === "number") { + message.code = object.code; + break; + } + break; case "ROW_ERROR_CODE_UNSPECIFIED": case 0: message.code = 0; @@ -7767,7 +7833,7 @@ else object.index = options.longs === String ? $util.Long.prototype.toString.call(message.index) : options.longs === Number ? new $util.LongBits(message.index.low >>> 0, message.index.high >>> 0).toNumber() : message.index; if (message.code != null && message.hasOwnProperty("code")) - object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] : message.code; + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] === undefined ? 
message.code : $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] : message.code; if (message.message != null && message.hasOwnProperty("message")) object.message = message.message; return object; @@ -8218,6 +8284,12 @@ message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); } switch (object.dataFormat) { + default: + if (typeof object.dataFormat === "number") { + message.dataFormat = object.dataFormat; + break; + } + break; case "DATA_FORMAT_UNSPECIFIED": case 0: message.dataFormat = 0; @@ -8311,7 +8383,7 @@ if (message.expireTime != null && message.hasOwnProperty("expireTime")) object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) - object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; + object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] === undefined ? message.dataFormat : $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); if (options.oneofs) @@ -9109,6 +9181,22 @@ return ReadStream; })(); + /** + * WriteStreamView enum. 
+ * @name google.cloud.bigquery.storage.v1.WriteStreamView + * @enum {number} + * @property {number} WRITE_STREAM_VIEW_UNSPECIFIED=0 WRITE_STREAM_VIEW_UNSPECIFIED value + * @property {number} BASIC=1 BASIC value + * @property {number} FULL=2 FULL value + */ + v1.WriteStreamView = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "WRITE_STREAM_VIEW_UNSPECIFIED"] = 0; + values[valuesById[1] = "BASIC"] = 1; + values[valuesById[2] = "FULL"] = 2; + return values; + })(); + v1.WriteStream = (function() { /** @@ -9121,6 +9209,7 @@ * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [tableSchema] WriteStream tableSchema * @property {google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null} [writeMode] WriteStream writeMode + * @property {string|null} [location] WriteStream location */ /** @@ -9186,6 +9275,14 @@ */ WriteStream.prototype.writeMode = 0; + /** + * WriteStream location. + * @member {string} location + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.location = ""; + /** * Creates a new WriteStream instance using the specified properties. 
* @function create @@ -9222,6 +9319,8 @@ $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.writeMode != null && Object.hasOwnProperty.call(message, "writeMode")) writer.uint32(/* id 7, wireType 0 =*/56).int32(message.writeMode); + if (message.location != null && Object.hasOwnProperty.call(message, "location")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.location); return writer; }; @@ -9280,6 +9379,10 @@ message.writeMode = reader.int32(); break; } + case 8: { + message.location = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -9351,6 +9454,9 @@ case 1: break; } + if (message.location != null && message.hasOwnProperty("location")) + if (!$util.isString(message.location)) + return "location: string expected"; return null; }; @@ -9369,6 +9475,12 @@ if (object.name != null) message.name = String(object.name); switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; case "TYPE_UNSPECIFIED": case 0: message.type = 0; @@ -9402,6 +9514,12 @@ message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.tableSchema); } switch (object.writeMode) { + default: + if (typeof object.writeMode === "number") { + message.writeMode = object.writeMode; + break; + } + break; case "WRITE_MODE_UNSPECIFIED": case 0: message.writeMode = 0; @@ -9411,6 +9529,8 @@ message.writeMode = 1; break; } + if (object.location != null) + message.location = String(object.location); return message; }; @@ -9434,11 +9554,12 @@ object.commitTime = null; object.tableSchema = null; object.writeMode = options.enums === String ? "WRITE_MODE_UNSPECIFIED" : 0; + object.location = ""; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? 
$root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] : message.type; + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] : message.type; if (message.createTime != null && message.hasOwnProperty("createTime")) object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); if (message.commitTime != null && message.hasOwnProperty("commitTime")) @@ -9446,7 +9567,9 @@ if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) object.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.tableSchema, options); if (message.writeMode != null && message.hasOwnProperty("writeMode")) - object.writeMode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] : message.writeMode; + object.writeMode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] === undefined ? 
message.writeMode : $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] : message.writeMode; + if (message.location != null && message.hasOwnProperty("location")) + object.location = message.location; return object; }; @@ -10049,6 +10172,12 @@ if (object.name != null) message.name = String(object.name); switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; case "TYPE_UNSPECIFIED": case 0: message.type = 0; @@ -10115,6 +10244,12 @@ break; } switch (object.mode) { + default: + if (typeof object.mode === "number") { + message.mode = object.mode; + break; + } + break; case "MODE_UNSPECIFIED": case 0: message.mode = 0; @@ -10213,9 +10348,9 @@ if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; if (message.mode != null && message.hasOwnProperty("mode")) - object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] : message.mode; + object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] === undefined ? 
message.mode : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] : message.mode; if (message.fields && message.fields.length) { object.fields = []; for (var j = 0; j < message.fields.length; ++j) @@ -12511,6 +12646,12 @@ message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); } switch (object.shardingStrategy) { + default: + if (typeof object.shardingStrategy === "number") { + message.shardingStrategy = object.shardingStrategy; + break; + } + break; case "SHARDING_STRATEGY_UNSPECIFIED": case 0: message.shardingStrategy = 0; @@ -12573,7 +12714,7 @@ if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? 
message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; return object; }; @@ -12901,6 +13042,12 @@ message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); } switch (object.format) { + default: + if (typeof object.format === "number") { + message.format = object.format; + break; + } + break; case "DATA_FORMAT_UNSPECIFIED": case 0: message.format = 0; @@ -12915,6 +13062,12 @@ break; } switch (object.shardingStrategy) { + default: + if (typeof object.shardingStrategy === "number") { + message.shardingStrategy = object.shardingStrategy; + break; + } + break; case "SHARDING_STRATEGY_UNSPECIFIED": case 0: message.shardingStrategy = 0; @@ -12962,11 +13115,11 @@ if (message.readOptions != null && message.hasOwnProperty("readOptions")) object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); if (message.format != null && message.hasOwnProperty("format")) - object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; + object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] === undefined ? message.format : $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? 
message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; return object; }; @@ -18438,6 +18591,12 @@ if (object.number != null) message.number = object.number | 0; switch (object.label) { + default: + if (typeof object.label === "number") { + message.label = object.label; + break; + } + break; case "LABEL_OPTIONAL": case 1: message.label = 1; @@ -18452,6 +18611,12 @@ break; } switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; case "TYPE_DOUBLE": case 1: message.type = 1; @@ -18578,9 +18743,9 @@ if (message.number != null && message.hasOwnProperty("number")) object.number = message.number; if (message.label != null && message.hasOwnProperty("label")) - object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; + object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] === undefined ? message.label : $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; + object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] === undefined ? 
message.type : $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; if (message.typeName != null && message.hasOwnProperty("typeName")) object.typeName = message.typeName; if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) @@ -20927,6 +21092,12 @@ if (object.javaStringCheckUtf8 != null) message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); switch (object.optimizeFor) { + default: + if (typeof object.optimizeFor === "number") { + message.optimizeFor = object.optimizeFor; + break; + } + break; case "SPEED": case 1: message.optimizeFor = 1; @@ -21035,7 +21206,7 @@ if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) object.javaOuterClassname = message.javaOuterClassname; if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) - object.optimizeFor = options.enums === String ? $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; + object.optimizeFor = options.enums === String ? $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] === undefined ? 
message.optimizeFor : $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) object.javaMultipleFiles = message.javaMultipleFiles; if (message.goPackage != null && message.hasOwnProperty("goPackage")) @@ -21488,6 +21659,7 @@ * @property {boolean|null} [deprecated] FieldOptions deprecated * @property {boolean|null} [weak] FieldOptions weak * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption + * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference */ @@ -21573,6 +21745,14 @@ */ FieldOptions.prototype.uninterpretedOption = $util.emptyArray; + /** + * FieldOptions .google.cloud.bigquery.storage.v1.columnName. + * @member {string} .google.cloud.bigquery.storage.v1.columnName + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.cloud.bigquery.storage.v1.columnName"] = null; + /** * FieldOptions .google.api.fieldBehavior. 
* @member {Array.} .google.api.fieldBehavior @@ -21638,6 +21818,8 @@ } if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && Object.hasOwnProperty.call(message, ".google.cloud.bigquery.storage.v1.columnName")) + writer.uint32(/* id 454943157, wireType 2 =*/3639545258).string(message[".google.cloud.bigquery.storage.v1.columnName"]); return writer; }; @@ -21706,6 +21888,10 @@ message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; } + case 454943157: { + message[".google.cloud.bigquery.storage.v1.columnName"] = reader.string(); + break; + } case 1052: { if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) message[".google.api.fieldBehavior"] = []; @@ -21798,6 +21984,9 @@ return "uninterpretedOption." 
+ error; } } + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) + if (!$util.isString(message[".google.cloud.bigquery.storage.v1.columnName"])) + return ".google.cloud.bigquery.storage.v1.columnName: string expected"; if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { if (!Array.isArray(message[".google.api.fieldBehavior"])) return ".google.api.fieldBehavior: array expected"; @@ -21837,6 +22026,12 @@ return object; var message = new $root.google.protobuf.FieldOptions(); switch (object.ctype) { + default: + if (typeof object.ctype === "number") { + message.ctype = object.ctype; + break; + } + break; case "STRING": case 0: message.ctype = 0; @@ -21853,6 +22048,12 @@ if (object.packed != null) message.packed = Boolean(object.packed); switch (object.jstype) { + default: + if (typeof object.jstype === "number") { + message.jstype = object.jstype; + break; + } + break; case "JS_NORMAL": case 0: message.jstype = 0; @@ -21884,6 +22085,8 @@ message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } + if (object[".google.cloud.bigquery.storage.v1.columnName"] != null) + message[".google.cloud.bigquery.storage.v1.columnName"] = String(object[".google.cloud.bigquery.storage.v1.columnName"]); if (object[".google.api.fieldBehavior"]) { if (!Array.isArray(object[".google.api.fieldBehavior"])) throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); @@ -21891,6 +22094,10 @@ for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) switch (object[".google.api.fieldBehavior"][i]) { default: + if (typeof object[".google.api.fieldBehavior"][i] === "number") { + message[".google.api.fieldBehavior"][i] = object[".google.api.fieldBehavior"][i]; + break; + } case "FIELD_BEHAVIOR_UNSPECIFIED": case 0: 
message[".google.api.fieldBehavior"][i] = 0; @@ -21959,9 +22166,10 @@ object.weak = false; object.unverifiedLazy = false; object[".google.api.resourceReference"] = null; + object[".google.cloud.bigquery.storage.v1.columnName"] = null; } if (message.ctype != null && message.hasOwnProperty("ctype")) - object.ctype = options.enums === String ? $root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; + object.ctype = options.enums === String ? $root.google.protobuf.FieldOptions.CType[message.ctype] === undefined ? message.ctype : $root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; if (message.packed != null && message.hasOwnProperty("packed")) object.packed = message.packed; if (message.deprecated != null && message.hasOwnProperty("deprecated")) @@ -21969,7 +22177,7 @@ if (message.lazy != null && message.hasOwnProperty("lazy")) object.lazy = message.lazy; if (message.jstype != null && message.hasOwnProperty("jstype")) - object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; + object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] === undefined ? message.jstype : $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; if (message.weak != null && message.hasOwnProperty("weak")) object.weak = message.weak; if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) @@ -21982,10 +22190,12 @@ if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { object[".google.api.fieldBehavior"] = []; for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) - object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; + object[".google.api.fieldBehavior"][j] = options.enums === String ? 
$root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] === undefined ? message[".google.api.fieldBehavior"][j] : $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; } if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) + object[".google.cloud.bigquery.storage.v1.columnName"] = message[".google.cloud.bigquery.storage.v1.columnName"]; return object; }; @@ -23339,6 +23549,12 @@ if (object.deprecated != null) message.deprecated = Boolean(object.deprecated); switch (object.idempotencyLevel) { + default: + if (typeof object.idempotencyLevel === "number") { + message.idempotencyLevel = object.idempotencyLevel; + break; + } + break; case "IDEMPOTENCY_UNKNOWN": case 0: message.idempotencyLevel = 0; @@ -23402,7 +23618,7 @@ if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) - object.idempotencyLevel = options.enums === String ? $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; + object.idempotencyLevel = options.enums === String ? $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] === undefined ? 
message.idempotencyLevel : $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -25155,6 +25371,12 @@ if (object.end != null) message.end = object.end | 0; switch (object.semantic) { + default: + if (typeof object.semantic === "number") { + message.semantic = object.semantic; + break; + } + break; case "NONE": case 0: message.semantic = 0; @@ -25204,7 +25426,7 @@ if (message.end != null && message.hasOwnProperty("end")) object.end = message.end; if (message.semantic != null && message.hasOwnProperty("semantic")) - object.semantic = options.enums === String ? $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] : message.semantic; + object.semantic = options.enums === String ? $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] === undefined ? message.semantic : $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] : message.semantic; return object; }; @@ -29077,6 +29299,12 @@ if (object.nameField != null) message.nameField = String(object.nameField); switch (object.history) { + default: + if (typeof object.history === "number") { + message.history = object.history; + break; + } + break; case "HISTORY_UNSPECIFIED": case 0: message.history = 0; @@ -29101,6 +29329,10 @@ for (var i = 0; i < object.style.length; ++i) switch (object.style[i]) { default: + if (typeof object.style[i] === "number") { + message.style[i] = object.style[i]; + break; + } case "STYLE_UNSPECIFIED": case 0: message.style[i] = 0; @@ -29148,7 +29380,7 @@ if (message.nameField != null && message.hasOwnProperty("nameField")) object.nameField = message.nameField; if (message.history != null && message.hasOwnProperty("history")) - object.history = options.enums === String ? 
$root.google.api.ResourceDescriptor.History[message.history] : message.history; + object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] === undefined ? message.history : $root.google.api.ResourceDescriptor.History[message.history] : message.history; if (message.plural != null && message.hasOwnProperty("plural")) object.plural = message.plural; if (message.singular != null && message.hasOwnProperty("singular")) @@ -29156,7 +29388,7 @@ if (message.style && message.style.length) { object.style = []; for (var j = 0; j < message.style.length; ++j) - object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; + object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] === undefined ? message.style[j] : $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; } return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index e2fa6466986..e8388095ced 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -12,14 +12,27 @@ "options": { "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", + "java_package": "com.google.cloud.bigquery.storage.v1", "java_multiple_files": true, "java_outer_classname": "TableProto", - "java_package": "com.google.cloud.bigquery.storage.v1", "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1", "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" }, "nested": { + "_columnName": { + "oneof": [ + "columnName" + ] + }, + "columnName": { + "type": "string", + "id": 454943157, + "extend": "google.protobuf.FieldOptions", + 
"options": { + "proto3_optional": true + } + }, "ArrowSchema": { "fields": { "serializedSchema": { @@ -566,6 +579,10 @@ "(google.api.field_behavior)": "REQUIRED", "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" } + }, + "view": { + "type": "WriteStreamView", + "id": 3 } } }, @@ -864,6 +881,13 @@ } } }, + "WriteStreamView": { + "values": { + "WRITE_STREAM_VIEW_UNSPECIFIED": 0, + "BASIC": 1, + "FULL": 2 + } + }, "WriteStream": { "options": { "(google.api.resource).type": "bigquerystorage.googleapis.com/WriteStream", @@ -911,6 +935,13 @@ "options": { "(google.api.field_behavior)": "IMMUTABLE" } + }, + "location": { + "type": "string", + "id": 8, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } } }, "nested": { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 2d83ddbe23b..79b10927e9b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -33,6 +33,11 @@ function main(name) { * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. */ // const name = 'abc123' + /** + * Indicates whether to get full or partial view of the WriteStream. If + * not set, view returned will be basic. 
+ */ + // const view = {} // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 6ff22b675bd..2e0e3c58628 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -258,7 +258,7 @@ "segments": [ { "start": 25, - "end": 54, + "end": 59, "type": "FULL" } ], @@ -270,6 +270,10 @@ { "name": "name", "type": "TYPE_STRING" + }, + { + "name": "view", + "type": ".google.cloud.bigquery.storage.v1.WriteStreamView" } ], "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 7ba3e1e633e..1a78d38a741 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -485,7 +485,7 @@ export class BigQueryReadClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'read_session.table': request.readSession!.table || '', + 'read_session.table': request.readSession!.table ?? '', }); this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); @@ -601,7 +601,7 @@ export class BigQueryReadClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - name: request.name || '', + name: request.name ?? 
'', }); this.initialize(); return this.innerApiCalls.splitReadStream(request, options, callback); @@ -644,7 +644,7 @@ export class BigQueryReadClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - read_stream: request.readStream || '', + read_stream: request.readStream ?? '', }); this.initialize(); return this.innerApiCalls.readRows(request, options); diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json index f0274ac3660..d730716117c 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json +++ b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json @@ -1,4 +1,5 @@ [ + "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", "../../protos/google/cloud/bigquery/storage/v1/avro.proto", "../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 10a5b597c6f..77b988cef24 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -459,7 +459,7 @@ export class BigQueryWriteClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - parent: request.parent || '', + parent: request.parent ?? '', }); this.initialize(); return this.innerApiCalls.createWriteStream(request, options, callback); @@ -472,6 +472,9 @@ export class BigQueryWriteClient { * @param {string} request.name * Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. 
+ * @param {google.cloud.bigquery.storage.v1.WriteStreamView} request.view + * Indicates whether to get full or partial view of the WriteStream. If + * not set, view returned will be basic. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. @@ -557,7 +560,7 @@ export class BigQueryWriteClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - name: request.name || '', + name: request.name ?? '', }); this.initialize(); return this.innerApiCalls.getWriteStream(request, options, callback); @@ -656,7 +659,7 @@ export class BigQueryWriteClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - name: request.name || '', + name: request.name ?? '', }); this.initialize(); return this.innerApiCalls.finalizeWriteStream(request, options, callback); @@ -761,7 +764,7 @@ export class BigQueryWriteClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - parent: request.parent || '', + parent: request.parent ?? '', }); this.initialize(); return this.innerApiCalls.batchCommitWriteStreams( @@ -866,7 +869,7 @@ export class BigQueryWriteClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - write_stream: request.writeStream || '', + write_stream: request.writeStream ?? 
'', }); this.initialize(); return this.innerApiCalls.flushRows(request, options, callback); diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json index f0274ac3660..d730716117c 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json +++ b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json @@ -1,4 +1,5 @@ [ + "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", "../../protos/google/cloud/bigquery/storage/v1/avro.proto", "../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 9b545b1af42..19120696d61 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -473,8 +473,8 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'table_reference.project_id': request.tableReference!.projectId || '', - 'table_reference.dataset_id': request.tableReference!.datasetId || '', + 'table_reference.project_id': request.tableReference!.projectId ?? '', + 'table_reference.dataset_id': request.tableReference!.datasetId ?? '', }); this.initialize(); return this.innerApiCalls.createReadSession(request, options, callback); @@ -578,7 +578,7 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'session.name': request.session!.name || '', + 'session.name': request.session!.name ?? 
'', }); this.initialize(); return this.innerApiCalls.batchCreateReadSessionStreams( @@ -692,7 +692,7 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'stream.name': request.stream!.name || '', + 'stream.name': request.stream!.name ?? '', }); this.initialize(); return this.innerApiCalls.finalizeStream(request, options, callback); @@ -809,7 +809,7 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'original_stream.name': request.originalStream!.name || '', + 'original_stream.name': request.originalStream!.name ?? '', }); this.initialize(); return this.innerApiCalls.splitReadStream(request, options, callback); @@ -852,7 +852,7 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'read_position.stream.name': request.readPosition!.stream!.name || '', + 'read_position.stream.name': request.readPosition!.stream!.name ?? 
'', }); this.initialize(); return this.innerApiCalls.readRows(request, options); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 18113a85423..cd2a50bb2c1 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -27,6 +27,21 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + function generateSampleMessage(instance: T) { const filledObject = ( instance.constructor as typeof protobuf.Message @@ -180,27 +195,27 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() ); - request.readSession = {}; - request.readSession.table = ''; - const expectedHeaderRequestParams = 'read_session.table='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; const expectedResponse = generateSampleMessage( new 
protos.google.cloud.bigquery.storage.v1.ReadSession() ); client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); const [response] = await client.createReadSession(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession without error using callback', async () => { @@ -212,16 +227,13 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() ); - request.readSession = {}; - request.readSession.table = ''; - const expectedHeaderRequestParams = 'read_session.table='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadSession() ); @@ -244,11 +256,14 @@ describe('v1.BigQueryReadClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const 
actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession with error', async () => { @@ -260,27 +275,27 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() ); - request.readSession = {}; - request.readSession.table = ''; - const expectedHeaderRequestParams = 'read_session.table='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.createReadSession = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.createReadSession(request), expectedError); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession with closed client', async () => { @@ -292,8 +307,12 @@ 
describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() ); - request.readSession = {}; - request.readSession.table = ''; + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.createReadSession(request), expectedError); @@ -310,26 +329,26 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() ); client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); const [response] = await client.splitReadStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream without error using callback', async () => { @@ -341,15 +360,12 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() ); @@ -372,11 +388,14 @@ describe('v1.BigQueryReadClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream with error', async () => { @@ -388,26 +407,26 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 
= getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.splitReadStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.splitReadStream(request), expectedError); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream with closed client', async () => { @@ -419,7 +438,11 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() ); - request.name = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.splitReadStream(request), expectedError); @@ -436,15 +459,12 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() ); - request.readStream = ''; - const expectedHeaderRequestParams = 'read_stream='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + 
request.readStream = defaultValue1; + const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() ); @@ -465,11 +485,14 @@ describe('v1.BigQueryReadClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.readRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions) - ); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes readRows with error', async () => { @@ -481,15 +504,12 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() ); - request.readStream = ''; - const expectedHeaderRequestParams = 'read_stream='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; + const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.readRows = stubServerStreamingCall( undefined, @@ -510,11 +530,14 @@ describe('v1.BigQueryReadClient', () => { }); }); await assert.rejects(promise, expectedError); - assert( - (client.innerApiCalls.readRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions) - ); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, 
request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes readRows with closed client', async () => { @@ -526,7 +549,11 @@ describe('v1.BigQueryReadClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() ); - request.readStream = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); const stream = client.readRows(request); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 7e38e445bce..3623068f487 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -27,6 +27,21 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + function generateSampleMessage(instance: T) { const filledObject = ( instance.constructor as typeof protobuf.Message @@ -182,30 +197,33 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = 
generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() ); - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; - const expectedHeaderRequestParams = - 'table_reference.project_id=&table_reference.dataset_id='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() ); client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); const [response] = await client.createReadSession(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession without error using callback', async () => { @@ -217,19 +235,19 
@@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() ); - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; - const expectedHeaderRequestParams = - 'table_reference.project_id=&table_reference.dataset_id='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() ); @@ -252,11 +270,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession 
with error', async () => { @@ -268,30 +289,33 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() ); - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; - const expectedHeaderRequestParams = - 'table_reference.project_id=&table_reference.dataset_id='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; const expectedError = new Error('expected'); client.innerApiCalls.createReadSession = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.createReadSession(request), expectedError); - assert( - (client.innerApiCalls.createReadSession as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createReadSession with closed client', async () => { @@ -303,10 
+327,18 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() ); - request.tableReference = {}; - request.tableReference.projectId = ''; - request.tableReference = {}; - request.tableReference.datasetId = ''; + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.createReadSession(request), expectedError); @@ -323,16 +355,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() ); - request.session = {}; - request.session.name = ''; - const expectedHeaderRequestParams = 'session.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() ); @@ -340,11 +369,14 @@ describe('v1beta1.BigQueryStorageClient', () => { stubSimpleCall(expectedResponse); const [response] = await 
client.batchCreateReadSessionStreams(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCreateReadSessionStreams without error using callback', async () => { @@ -356,16 +388,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() ); - request.session = {}; - request.session.name = ''; - const expectedHeaderRequestParams = 'session.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() ); @@ -388,11 +417,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + 
client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCreateReadSessionStreams with error', async () => { @@ -404,16 +436,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() ); - request.session = {}; - request.session.name = ''; - const expectedHeaderRequestParams = 'session.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( undefined, @@ -423,11 +452,14 @@ describe('v1beta1.BigQueryStorageClient', () => { client.batchCreateReadSessionStreams(request), expectedError ); - assert( - (client.innerApiCalls.batchCreateReadSessionStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCreateReadSessionStreams with closed client', async () => { @@ -439,8 +471,12 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() ); - request.session = {}; - request.session.name = ''; + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects( @@ -460,27 +496,27 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() ); - request.stream = {}; - request.stream.name = ''; - const expectedHeaderRequestParams = 'stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.protobuf.Empty() ); client.innerApiCalls.finalizeStream = stubSimpleCall(expectedResponse); const [response] = await client.finalizeStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.finalizeStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const 
actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeStream without error using callback', async () => { @@ -492,16 +528,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() ); - request.stream = {}; - request.stream.name = ''; - const expectedHeaderRequestParams = 'stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.protobuf.Empty() ); @@ -524,11 +557,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.finalizeStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeStream with error', async () => { @@ -540,27 +576,27 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new 
protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() ); - request.stream = {}; - request.stream.name = ''; - const expectedHeaderRequestParams = 'stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.finalizeStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.finalizeStream(request), expectedError); - assert( - (client.innerApiCalls.finalizeStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeStream with closed client', async () => { @@ -572,8 +608,12 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() ); - request.stream = {}; - request.stream.name = ''; + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.finalizeStream(request), expectedError); @@ -590,27 +630,27 @@ 
describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() ); - request.originalStream = {}; - request.originalStream.name = ''; - const expectedHeaderRequestParams = 'original_stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() ); client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); const [response] = await client.splitReadStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream without error using callback', async () => { @@ -622,16 +662,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() ); - request.originalStream = {}; - request.originalStream.name = ''; - const expectedHeaderRequestParams = 'original_stream.name='; - const expectedOptions = { - otherArgs: { - 
headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() ); @@ -654,11 +691,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream with error', async () => { @@ -670,27 +710,27 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() ); - request.originalStream = {}; - request.originalStream.name = ''; - const expectedHeaderRequestParams = 'original_stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = 
`original_stream.name=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.splitReadStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.splitReadStream(request), expectedError); - assert( - (client.innerApiCalls.splitReadStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes splitReadStream with closed client', async () => { @@ -702,8 +742,12 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() ); - request.originalStream = {}; - request.originalStream.name = ''; + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.splitReadStream(request), expectedError); @@ -720,17 +764,14 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() ); - request.readPosition = {}; - request.readPosition.stream = {}; - request.readPosition.stream.name = ''; - const expectedHeaderRequestParams = 'read_position.stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.readPosition ??= {}; + 
request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() ); @@ -751,11 +792,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.readRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions) - ); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes readRows with error', async () => { @@ -767,17 +811,14 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() ); - request.readPosition = {}; - request.readPosition.stream = {}; - request.readPosition.stream.name = ''; - const expectedHeaderRequestParams = 'read_position.stream.name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; const expectedError = new 
Error('expected'); client.innerApiCalls.readRows = stubServerStreamingCall( undefined, @@ -798,11 +839,14 @@ describe('v1beta1.BigQueryStorageClient', () => { }); }); await assert.rejects(promise, expectedError); - assert( - (client.innerApiCalls.readRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions) - ); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes readRows with closed client', async () => { @@ -814,9 +858,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() ); - request.readPosition = {}; - request.readPosition.stream = {}; - request.readPosition.stream.name = ''; + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); const stream = client.readRows(request); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index c27c4dfd91f..830e18a06bb 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -27,6 +27,21 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = 
protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + function generateSampleMessage(instance: T) { const filledObject = ( instance.constructor as typeof protobuf.Message @@ -175,26 +190,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.WriteStream() ); client.innerApiCalls.createWriteStream = stubSimpleCall(expectedResponse); const [response] = await client.createWriteStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes 
createWriteStream without error using callback', async () => { @@ -206,15 +221,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.WriteStream() ); @@ -237,11 +249,14 @@ describe('v1.BigQueryWriteClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.createWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createWriteStream with error', async () => { @@ -253,26 +268,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + 
'.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.createWriteStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.createWriteStream(request), expectedError); - assert( - (client.innerApiCalls.createWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes createWriteStream with closed client', async () => { @@ -284,7 +299,11 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() ); - request.parent = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.createWriteStream(request), expectedError); @@ -301,26 +320,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + 
request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.WriteStream() ); client.innerApiCalls.getWriteStream = stubSimpleCall(expectedResponse); const [response] = await client.getWriteStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.getWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes getWriteStream without error using callback', async () => { @@ -332,15 +351,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.WriteStream() ); @@ -363,11 +379,14 @@ describe('v1.BigQueryWriteClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.getWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = 
( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes getWriteStream with error', async () => { @@ -379,26 +398,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.getWriteStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.getWriteStream(request), expectedError); - assert( - (client.innerApiCalls.getWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes getWriteStream with closed client', async () => { @@ -410,7 +429,11 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() ); - request.name = ''; + const defaultValue1 = 
getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.getWriteStream(request), expectedError); @@ -427,15 +450,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() ); @@ -443,11 +463,14 @@ describe('v1.BigQueryWriteClient', () => { stubSimpleCall(expectedResponse); const [response] = await client.finalizeWriteStream(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.finalizeWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeWriteStream without error using callback', async () => { @@ -459,15 +482,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new 
protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() ); @@ -490,11 +510,14 @@ describe('v1.BigQueryWriteClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.finalizeWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeWriteStream with error', async () => { @@ -506,26 +529,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() ); - request.name = ''; - const expectedHeaderRequestParams = 'name='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; const expectedError = 
new Error('expected'); client.innerApiCalls.finalizeWriteStream = stubSimpleCall( undefined, expectedError ); await assert.rejects(client.finalizeWriteStream(request), expectedError); - assert( - (client.innerApiCalls.finalizeWriteStream as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes finalizeWriteStream with closed client', async () => { @@ -537,7 +560,11 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() ); - request.name = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.finalizeWriteStream(request), expectedError); @@ -554,15 +581,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedResponse = generateSampleMessage( new 
protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() ); @@ -570,11 +594,14 @@ describe('v1.BigQueryWriteClient', () => { stubSimpleCall(expectedResponse); const [response] = await client.batchCommitWriteStreams(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.batchCommitWriteStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCommitWriteStreams without error using callback', async () => { @@ -586,15 +613,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() ); @@ -617,11 +641,14 @@ describe('v1.BigQueryWriteClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.batchCommitWriteStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const 
actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCommitWriteStreams with error', async () => { @@ -633,15 +660,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() ); - request.parent = ''; - const expectedHeaderRequestParams = 'parent='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall( undefined, @@ -651,11 +675,14 @@ describe('v1.BigQueryWriteClient', () => { client.batchCommitWriteStreams(request), expectedError ); - assert( - (client.innerApiCalls.batchCommitWriteStreams as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes batchCommitWriteStreams with closed client', async () => { @@ -667,7 +694,11 @@ describe('v1.BigQueryWriteClient', () => { 
const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() ); - request.parent = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects( @@ -687,26 +718,26 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() ); - request.writeStream = ''; - const expectedHeaderRequestParams = 'write_stream='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() ); client.innerApiCalls.flushRows = stubSimpleCall(expectedResponse); const [response] = await client.flushRows(request); assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.flushRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes flushRows without error using callback', async () => { @@ -718,15 +749,12 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new 
protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() ); - request.writeStream = ''; - const expectedHeaderRequestParams = 'write_stream='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() ); @@ -749,11 +777,14 @@ describe('v1.BigQueryWriteClient', () => { }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.flushRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions /*, callback defined above */) - ); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes flushRows with error', async () => { @@ -765,23 +796,23 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() ); - request.writeStream = ''; - const expectedHeaderRequestParams = 'write_stream='; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; const expectedError = new 
Error('expected'); client.innerApiCalls.flushRows = stubSimpleCall(undefined, expectedError); await assert.rejects(client.flushRows(request), expectedError); - assert( - (client.innerApiCalls.flushRows as SinonStub) - .getCall(0) - .calledWith(request, expectedOptions, undefined) - ); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); it('invokes flushRows with closed client', async () => { @@ -793,7 +824,11 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() ); - request.writeStream = ''; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.flushRows(request), expectedError); @@ -810,6 +845,7 @@ describe('v1.BigQueryWriteClient', () => { const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() ); + const expectedResponse = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.AppendRowsResponse() ); From 40fe6aaeb55ed0a2d98d8c016f69b06c43104168 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Thu, 10 Nov 2022 17:33:26 -0800 Subject: [PATCH 197/333] Update .OwlBot.lock.yaml --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 
4d586c42063..e97989708da 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:b15a6f06cc06dcffa11e1bebdf1a74b6775a134aac24a0f86f51ddf728eb373e -# created: 2022-08-26T22:34:55.905845397Z + digest: sha256:5b05f26103855c3a15433141389c478d1d3fe088fb5d4e3217c4793f6b3f245e +# created: 2022-11-04 From bd13045e8d2611fd2076a6db01639492085d5d49 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Nov 2022 01:40:38 +0000 Subject: [PATCH 198/333] docs: remove stale header guidance for AppendRows (#299) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 485941276 Source-Link: https://togithub.com/googleapis/googleapis/commit/a5f5928e736ea88c03e48c506a19fa632b43de9e Source-Link: https://togithub.com/googleapis/googleapis-gen/commit/61ebfaa325101bc9b29ee34900b45b2f0d23981e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlYmZhYTMyNTEwMWJjOWIyOWVlMzQ5MDBiNDViMmYwZDIzOTgxZSJ9 BEGIN_NESTED_COMMIT chore: override API mixins when needed PiperOrigin-RevId: 477248447 Source-Link: https://togithub.com/googleapis/googleapis/commit/4689c7380444972caf11fd1b96e7ec1f864b7dfb Source-Link: https://togithub.com/googleapis/googleapis-gen/commit/c4059786a5cd805a0151d95b477fbc486bcbcedc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzQwNTk3ODZhNWNkODA1YTAxNTFkOTViNDc3ZmJjNDg2YmNiY2VkYyJ9 END_NESTED_COMMIT --- .../protos/google/cloud/bigquery/storage/v1/storage.proto | 7 ------- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- .../bigquery-storage/src/v1/big_query_write_client.ts | 7 ------- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto 
b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index a49e1a389c2..df602135b9d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -167,13 +167,6 @@ service BigQueryWrite { // * For PENDING streams, data is not made visible until the stream itself is // finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly // committed via the `BatchCommitWriteStreams` rpc. - // - // Note: For users coding against the gRPC api directly, it may be - // necessary to supply the x-goog-request-params system parameter - // with `write_stream=`. - // - // More information about system parameters: - // https://cloud.google.com/apis/docs/system-parameters rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) { option (google.api.http) = { post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 2e0e3c58628..a2be29a2776 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -199,7 +199,7 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", "title": "BigQueryRead appendRows Sample", "origin": "API_DEFINITION", - "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. 
The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc. Note: For users coding against the gRPC api directly, it may be necessary to supply the x-goog-request-params system parameter with `write_stream=`. More information about system parameters: https://cloud.google.com/apis/docs/system-parameters", + "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. 
If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc.", "canonical": true, "file": "big_query_write.append_rows.js", "language": "JAVASCRIPT", diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 77b988cef24..6e002a67e7c 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -908,13 +908,6 @@ export class BigQueryWriteClient { * finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly * committed via the `BatchCommitWriteStreams` rpc. * - * Note: For users coding against the gRPC api directly, it may be - * necessary to supply the x-goog-request-params system parameter - * with `write_stream=`. - * - * More information about system parameters: - * https://cloud.google.com/apis/docs/system-parameters - * * @param {object} [options] * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} From 3dcca2c59e796c8e865d7a07902bdaec473eb2e7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Nov 2022 02:48:17 +0100 Subject: [PATCH 199/333] chore(deps): update dependency jsdoc to v4 (#302) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jsdoc](https://togithub.com/jsdoc/jsdoc) | [`^3.6.3` -> `^4.0.0`](https://renovatebot.com/diffs/npm/jsdoc/3.6.11/4.0.0) | [![age](https://badges.renovateapi.com/packages/npm/jsdoc/4.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/jsdoc/4.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/jsdoc/4.0.0/compatibility-slim/3.6.11)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/jsdoc/4.0.0/confidence-slim/3.6.11)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). 
View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a74cc2991f9..477b77d3bd8 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -35,7 +35,7 @@ "@types/sinon": "^10.0.0", "c8": "^7.1.0", "gts": "^3.1.0", - "jsdoc": "^3.6.3", + "jsdoc": "^4.0.0", "jsdoc-fresh": "^2.0.0", "jsdoc-region-tag": "^2.0.0", "linkinator": "^4.0.0", From f0966e42ff4d8372ff828d336726964912701c26 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Nov 2022 10:34:13 +0100 Subject: [PATCH 200/333] chore(deps): update dependency @types/node to v18 (#301) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [@types/node](https://togithub.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node) ([source](https://togithub.com/DefinitelyTyped/DefinitelyTyped)) | [`^16.0.0` -> `^18.0.0`](https://renovatebot.com/diffs/npm/@types%2fnode/16.18.3/18.11.9) | [![age](https://badges.renovateapi.com/packages/npm/@types%2fnode/18.11.9/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/@types%2fnode/18.11.9/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/@types%2fnode/18.11.9/compatibility-slim/16.18.3)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/npm/@types%2fnode/18.11.9/confidence-slim/16.18.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 477b77d3bd8..4662fb96c9d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -31,7 +31,7 @@ }, "devDependencies": { "@types/mocha": "^9.0.0", - "@types/node": "^16.0.0", + "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", "c8": "^7.1.0", "gts": "^3.1.0", From 08c5fa13e36a149f2ba9b2919c0a072648051bfe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 11 Nov 2022 10:04:13 +0000 Subject: [PATCH 201/333] chore(main): release 3.2.0 (#298) :robot: I have created a release *beep* *boop* --- ## [3.2.0](https://togithub.com/googleapis/nodejs-bigquery-storage/compare/v3.1.1...v3.2.0) (2022-11-11) ### Features * Add location to WriteStream and add WriteStreamView support ([#295](https://togithub.com/googleapis/nodejs-bigquery-storage/issues/295)) 
([ba3c5ef](https://togithub.com/googleapis/nodejs-bigquery-storage/commit/ba3c5ef05366b1e9a542b9b13fc0c7a25118b2a3)) --- This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please). --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 33229aabf90..eeb46ea09fd 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [3.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.1...v3.2.0) (2022-11-11) + + +### Features + +* Add location to WriteStream and add WriteStreamView support ([#295](https://github.com/googleapis/nodejs-bigquery-storage/issues/295)) ([ba3c5ef](https://github.com/googleapis/nodejs-bigquery-storage/commit/ba3c5ef05366b1e9a542b9b13fc0c7a25118b2a3)) + ## [3.1.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.0...v3.1.1) (2022-09-01) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 4662fb96c9d..2d6e2462757 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.1.1", + "version": "3.2.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json 
b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index a2be29a2776..94f815441aa 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.1.1", + "version": "3.2.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 3e4305f2828..0f8c9365858 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.1.1", + "version": "3.2.0", "language": "TYPESCRIPT", "apis": [ { From bbd225dee0d708048a355b108e3ee91aaad8faa7 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Fri, 11 Nov 2022 22:30:32 -0800 Subject: [PATCH 202/333] build: update README for deprecation notice and delete all files except samples (#303) --- .../.github/.OwlBot.lock.yaml | 17 - .../bigquery-storage/.github/.OwlBot.yaml | 30 - .../bigquery-storage/.github/CODEOWNERS | 12 - .../.github/ISSUE_TEMPLATE/bug_report.md | 38 - .../.github/ISSUE_TEMPLATE/config.yml | 4 - .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/question.md | 12 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - .../bigquery-storage/.github/auto-approve.yml | 3 - .../bigquery-storage/.github/auto-label.yaml | 2 - 
.../.github/generated-files-bot.yml | 16 - .../.github/release-please.yml | 2 - .../.github/release-trigger.yml | 1 - .../.github/sync-repo-settings.yaml | 24 - .../.kokoro/continuous/node12/common.cfg | 24 - .../.kokoro/continuous/node12/lint.cfg | 4 - .../continuous/node12/samples-test.cfg | 7 - .../.kokoro/continuous/node12/system-test.cfg | 7 - .../.kokoro/continuous/node12/test.cfg | 0 handwritten/bigquery-storage/.kokoro/docs.sh | 25 - handwritten/bigquery-storage/.kokoro/lint.sh | 33 - .../.kokoro/presubmit/windows/common.cfg | 2 - .../.kokoro/presubmit/windows/test.cfg | 2 - .../bigquery-storage/.kokoro/publish.sh | 30 - .../.kokoro/release/docs-devsite.cfg | 26 - .../.kokoro/release/docs-devsite.sh | 29 - .../bigquery-storage/.kokoro/release/docs.cfg | 26 - .../bigquery-storage/.kokoro/release/docs.sh | 49 - .../.kokoro/release/publish.cfg | 39 - .../bigquery-storage/.kokoro/samples-test.sh | 19 +- .../bigquery-storage/.kokoro/system-test.sh | 48 +- handwritten/bigquery-storage/.kokoro/test.bat | 33 - handwritten/bigquery-storage/.kokoro/test.sh | 51 - handwritten/bigquery-storage/CHANGELOG.md | 223 - .../bigquery-storage/CODE_OF_CONDUCT.md | 94 - handwritten/bigquery-storage/CONTRIBUTING.md | 76 - handwritten/bigquery-storage/LICENSE | 202 - handwritten/bigquery-storage/README.md | 2 + .../bigquery-storage/linkinator.config.json | 16 - handwritten/bigquery-storage/owlbot.py | 15 +- handwritten/bigquery-storage/package.json | 54 - .../bigquery/storage/v1/annotations.proto | 28 - .../cloud/bigquery/storage/v1/arrow.proto | 64 - .../cloud/bigquery/storage/v1/avro.proto | 56 - .../cloud/bigquery/storage/v1/protobuf.proto | 48 - .../cloud/bigquery/storage/v1/storage.proto | 666 - .../cloud/bigquery/storage/v1/stream.proto | 286 - .../cloud/bigquery/storage/v1/table.proto | 166 - .../bigquery/storage/v1beta1/arrow.proto | 36 - .../cloud/bigquery/storage/v1beta1/avro.proto | 37 - .../storage/v1beta1/read_options.proto | 39 - 
.../bigquery/storage/v1beta1/storage.proto | 405 - .../storage/v1beta1/table_reference.proto | 41 - .../bigquery-storage/protos/protos.d.ts | 11968 ------ handwritten/bigquery-storage/protos/protos.js | 29973 ---------------- .../bigquery-storage/protos/protos.json | 2817 -- handwritten/bigquery-storage/src/index.ts | 38 - .../src/v1/big_query_read_client.ts | 938 - .../src/v1/big_query_read_client_config.json | 44 - .../src/v1/big_query_read_proto_list.json | 9 - .../src/v1/big_query_write_client.ts | 1213 - .../src/v1/big_query_write_client_config.json | 73 - .../src/v1/big_query_write_proto_list.json | 9 - .../src/v1/gapic_metadata.json | 117 - handwritten/bigquery-storage/src/v1/index.ts | 20 - .../src/v1beta1/big_query_storage_client.ts | 1004 - .../big_query_storage_client_config.json | 54 - .../v1beta1/big_query_storage_proto_list.json | 7 - .../src/v1beta1/gapic_metadata.json | 68 - .../bigquery-storage/src/v1beta1/index.ts | 19 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 41 - .../bigquery-storage/system-test/install.ts | 51 - .../test/gapic_big_query_read_v1.ts | 896 - .../test/gapic_big_query_storage_v1beta1.ts | 1055 - .../test/gapic_big_query_write_v1.ts | 1246 - handwritten/bigquery-storage/tsconfig.json | 19 - .../bigquery-storage/webpack.config.js | 64 - 79 files changed, 13 insertions(+), 54958 deletions(-) delete mode 100644 handwritten/bigquery-storage/.github/.OwlBot.lock.yaml delete mode 100644 handwritten/bigquery-storage/.github/.OwlBot.yaml delete mode 100644 handwritten/bigquery-storage/.github/CODEOWNERS delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md delete mode 100644 
handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 handwritten/bigquery-storage/.github/auto-approve.yml delete mode 100644 handwritten/bigquery-storage/.github/auto-label.yaml delete mode 100644 handwritten/bigquery-storage/.github/generated-files-bot.yml delete mode 100644 handwritten/bigquery-storage/.github/release-please.yml delete mode 100644 handwritten/bigquery-storage/.github/release-trigger.yml delete mode 100644 handwritten/bigquery-storage/.github/sync-repo-settings.yaml delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg delete mode 100755 handwritten/bigquery-storage/.kokoro/docs.sh delete mode 100755 handwritten/bigquery-storage/.kokoro/lint.sh delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg delete mode 100755 handwritten/bigquery-storage/.kokoro/publish.sh delete mode 100644 handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg delete mode 100755 handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh delete mode 100644 handwritten/bigquery-storage/.kokoro/release/docs.cfg delete mode 100755 handwritten/bigquery-storage/.kokoro/release/docs.sh delete mode 100644 handwritten/bigquery-storage/.kokoro/release/publish.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/test.bat delete mode 100755 handwritten/bigquery-storage/.kokoro/test.sh delete mode 100644 handwritten/bigquery-storage/CHANGELOG.md delete mode 100644 
handwritten/bigquery-storage/CODE_OF_CONDUCT.md delete mode 100644 handwritten/bigquery-storage/CONTRIBUTING.md delete mode 100644 handwritten/bigquery-storage/LICENSE delete mode 100644 handwritten/bigquery-storage/linkinator.config.json delete mode 100644 handwritten/bigquery-storage/package.json delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto delete mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto delete mode 100644 handwritten/bigquery-storage/protos/protos.d.ts delete mode 100644 handwritten/bigquery-storage/protos/protos.js delete mode 100644 handwritten/bigquery-storage/protos/protos.json delete mode 100644 handwritten/bigquery-storage/src/index.ts delete mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client.ts delete mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client_config.json delete mode 
100644 handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json delete mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client.ts delete mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client_config.json delete mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json delete mode 100644 handwritten/bigquery-storage/src/v1/gapic_metadata.json delete mode 100644 handwritten/bigquery-storage/src/v1/index.ts delete mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts delete mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json delete mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json delete mode 100644 handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json delete mode 100644 handwritten/bigquery-storage/src/v1beta1/index.ts delete mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js delete mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts delete mode 100644 handwritten/bigquery-storage/system-test/install.ts delete mode 100644 handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts delete mode 100644 handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts delete mode 100644 handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts delete mode 100644 handwritten/bigquery-storage/tsconfig.json delete mode 100644 handwritten/bigquery-storage/webpack.config.js diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml deleted file mode 100644 index e97989708da..00000000000 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:5b05f26103855c3a15433141389c478d1d3fe088fb5d4e3217c4793f6b3f245e -# created: 2022-11-04 diff --git a/handwritten/bigquery-storage/.github/.OwlBot.yaml b/handwritten/bigquery-storage/.github/.OwlBot.yaml deleted file mode 100644 index 2d27e09de99..00000000000 --- a/handwritten/bigquery-storage/.github/.OwlBot.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - -deep-preserve-regex: - - /owl-bot-staging/v1alpha2 - - /owl-bot-staging/v1beta2 - - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs/(.*) - dest: /owl-bot-staging/$1/$2 - -begin-after-commit-hash: e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a - diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS deleted file mode 100644 index 6d82f1f7467..00000000000 --- a/handwritten/bigquery-storage/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax - - -# The yoshi-nodejs team is the default owner for nodejs repositories. -* @googleapis/yoshi-nodejs @googleapis/api-bigquery - -# The github automation team is the default owner for the auto-approve file. -.github/auto-approve.yml @googleapis/github-automation diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 0ad95022413..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -labels: 'type: bug, priority: p2' ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -1) Is this a client library issue or a product issue? -This is the client library for . 
We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. - -2) Did someone already solve this? - - Search the issues already opened: https://github.com/googleapis/nodejs-bigquery-storage/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node - - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js - -3) Do you have a support contract? -Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. - -If the support paths suggested above still do not result in a resolution, please provide the following details. - -#### Environment details - - - OS: - - Node.js version: - - npm version: - - `@google-cloud/bigquery-storage` version: - -#### Steps to reproduce - - 1. ? - 2. ? - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index 603b90133b6..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,4 +0,0 @@ -contact_links: - - name: Google Cloud Support - url: https://cloud.google.com/support/ - about: If you have a support contract with Google, please use the Google Cloud Support portal. 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index b0327dfa02e..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library -labels: 'type: feature request, priority: p3' ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index 97323113911..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -name: Question -about: Ask a question -labels: 'type: question, priority: p3' ---- - -Thanks for stopping by to ask us a question! Please make sure to include: -- What you're trying to do -- What code you've already tried -- Any error messages you're getting - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 99586903212..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 19153139702..00000000000 --- a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🦕 diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml deleted file mode 100644 index 4cd91cc16ae..00000000000 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -processes: - - "NodeDependency" - - "OwlBotTemplateChanges" diff --git a/handwritten/bigquery-storage/.github/auto-label.yaml b/handwritten/bigquery-storage/.github/auto-label.yaml deleted file mode 100644 index 09c8d735b45..00000000000 --- a/handwritten/bigquery-storage/.github/auto-label.yaml +++ /dev/null @@ -1,2 +0,0 @@ -requestsize: - enabled: true diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml deleted file mode 100644 index 992ccef4a13..00000000000 --- a/handwritten/bigquery-storage/.github/generated-files-bot.yml +++ /dev/null @@ -1,16 +0,0 @@ -generatedFiles: -- path: '.kokoro/**' - message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: '.github/CODEOWNERS' - message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' -- path: '.github/workflows/ci.yaml' - message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: '.github/generated-files-bot.+(yml|yaml)' - message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: 'README.md' - message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' -- path: 'samples/README.md' - message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' -ignoreAuthors: -- 'gcf-owl-bot[bot]' -- 'yoshi-automation' diff --git a/handwritten/bigquery-storage/.github/release-please.yml b/handwritten/bigquery-storage/.github/release-please.yml deleted file mode 100644 index a1b41da3cb3..00000000000 --- a/handwritten/bigquery-storage/.github/release-please.yml +++ /dev/null @@ -1,2 +0,0 @@ -handleGHRelease: true -releaseType: node diff --git a/handwritten/bigquery-storage/.github/release-trigger.yml b/handwritten/bigquery-storage/.github/release-trigger.yml deleted file mode 100644 index d4ca94189e1..00000000000 --- a/handwritten/bigquery-storage/.github/release-trigger.yml +++ /dev/null @@ -1 +0,0 @@ -enabled: true diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml deleted file mode 100644 index 4a30a08e54c..00000000000 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,24 +0,0 @@ -branchProtectionRules: - - pattern: main - isAdminEnforced: true - requiredApprovingReviewCount: 1 - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true - requiredStatusCheckContexts: - - "ci/kokoro: Samples test" - - "ci/kokoro: System test" - - docs - - lint - - test (12) - - test (14) - - test (16) - - cla/google - - windows - - OwlBot Post Processor -permissionRules: - - team: yoshi-admins - permission: admin - - team: jsteam-admins - permission: admin - - team: jsteam - permission: push diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg 
b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg deleted file mode 100644 index 7fc0cdeac69..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg deleted file mode 100644 index 0a5d546b96b..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg deleted file mode 100644 index 68b02101fc1..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg deleted file mode 100644 index 3ccb29d69f8..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/.kokoro/docs.sh b/handwritten/bigquery-storage/.kokoro/docs.sh deleted file mode 100755 index 85901242b5e..00000000000 --- a/handwritten/bigquery-storage/.kokoro/docs.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -export NPM_CONFIG_PREFIX=${HOME}/.npm-global - -cd $(dirname $0)/.. 
- -npm install - -npm run docs-test diff --git a/handwritten/bigquery-storage/.kokoro/lint.sh b/handwritten/bigquery-storage/.kokoro/lint.sh deleted file mode 100755 index aef4866e4c4..00000000000 --- a/handwritten/bigquery-storage/.kokoro/lint.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -export NPM_CONFIG_PREFIX=${HOME}/.npm-global - -cd $(dirname $0)/.. - -npm install - -# Install and link samples -if [ -f samples/package.json ]; then - cd samples/ - npm link ../ - npm install - cd .. 
-fi - -npm run lint diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg deleted file mode 100644 index d6e25e0b1b8..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg +++ /dev/null @@ -1,2 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg deleted file mode 100644 index 83de067d5f1..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg +++ /dev/null @@ -1,2 +0,0 @@ -# Use the test file directly -build_file: "nodejs-bigquery-storage/.kokoro/test.bat" diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh deleted file mode 100755 index 949e3e1d0c2..00000000000 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -export NPM_CONFIG_PREFIX=${HOME}/.npm-global - -# Start the releasetool reporter -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -cd $(dirname $0)/.. 
- -NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) -echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc - -npm install -npm publish --access=public --registry=https://wombat-dressing-room.appspot.com diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg deleted file mode 100644 index 8bcc62cc814..00000000000 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ /dev/null @@ -1,26 +0,0 @@ -# service account used to publish up-to-date docs. -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - -# doc publications use a Python image. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/release/docs-devsite.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh deleted file mode 100755 index 2198e67fe92..00000000000 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -if [[ -z "$CREDENTIALS" ]]; then - # if CREDENTIALS are explicitly set, assume we're testing locally - # and don't set NPM_CONFIG_PREFIX. - export NPM_CONFIG_PREFIX=${HOME}/.npm-global - export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" - cd $(dirname $0)/../.. -fi - -npm install -npm install --no-save @google-cloud/cloud-rad@^0.2.5 -npx @google-cloud/cloud-rad \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg deleted file mode 100644 index 17861c90782..00000000000 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ /dev/null @@ -1,26 +0,0 @@ -# service account used to publish up-to-date docs. -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - -# doc publications use a Python image. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/release/docs.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh deleted file mode 100755 index 1d8f3f490a5..00000000000 --- a/handwritten/bigquery-storage/.kokoro/release/docs.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# build jsdocs (Python is installed on the Node 10 docker image). -if [[ -z "$CREDENTIALS" ]]; then - # if CREDENTIALS are explicitly set, assume we're testing locally - # and don't set NPM_CONFIG_PREFIX. - export NPM_CONFIG_PREFIX=${HOME}/.npm-global - export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" - cd $(dirname $0)/../.. -fi -npm install -npm run docs - -# create docs.metadata, based on package.json and .repo-metadata.json. 
-npm i json@9.0.6 -g -python3 -m docuploader create-metadata \ - --name=$(cat .repo-metadata.json | json name) \ - --version=$(cat package.json | json version) \ - --language=$(cat .repo-metadata.json | json language) \ - --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ - --product-page=$(cat .repo-metadata.json | json product_documentation) \ - --github-repository=$(cat .repo-metadata.json | json repo) \ - --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) -cp docs.metadata ./docs/docs.metadata - -# deploy the docs. -if [[ -z "$CREDENTIALS" ]]; then - CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account -fi -if [[ -z "$BUCKET" ]]; then - BUCKET=docs-staging -fi -python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg deleted file mode 100644 index ba6547f468f..00000000000 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ /dev/null @@ -1,39 +0,0 @@ -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-npm-token-1" - } - } -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/publish.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index fbc058a4ec4..5228b38be94 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -1,8 +1,7 @@ #!/bin/bash - -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # -# Licensed under the Apache License, Version 2.0 (the "License"); +# Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # @@ -13,32 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - set -eo pipefail - export NPM_CONFIG_PREFIX=${HOME}/.npm-global - # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export GCLOUD_PROJECT=long-door-651 - cd $(dirname $0)/.. - # Run a pre-test hook, if a pre-samples-test.sh is in the project if [ -f .kokoro/pre-samples-test.sh ]; then set +x . .kokoro/pre-samples-test.sh set -x fi - if [ -f samples/package.json ]; then - npm install - # Install and link samples cd samples/ - npm link ../ npm install - cd .. # If tests are running against main branch, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then @@ -50,10 +39,8 @@ if [ -f samples/package.json ]; then } trap cleanup EXIT HUP fi - - npm run samples-test + npm run test fi - # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: COVERAGE_NODE=12 diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 87fa0653d76..506e797862a 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -1,6 +1,5 @@ #!/bin/bash - -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,49 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - set -eo pipefail +echo "no-op" -export NPM_CONFIG_PREFIX=${HOME}/.npm-global - -# Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -export GCLOUD_PROJECT=long-door-651 - -cd $(dirname $0)/.. - -# Run a pre-test hook, if a pre-system-test.sh is in the project -if [ -f .kokoro/pre-system-test.sh ]; then - set +x - . .kokoro/pre-system-test.sh - set -x -fi - -npm install - -# If tests are running against main branch, configure flakybot -# to open issues on failures: -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then - export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml - export MOCHA_REPORTER=xunit - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -npm run system-test - -# codecov combines coverage across integration and unit tests. 
Include -# the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=12 -if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then - NYC_BIN=./node_modules/nyc/bin/nyc.js - if [ -f "$NYC_BIN" ]; then - $NYC_BIN report || true - fi - bash $KOKORO_GFILE_DIR/codecov.sh -else - echo "coverage is only reported for Node $COVERAGE_NODE" -fi diff --git a/handwritten/bigquery-storage/.kokoro/test.bat b/handwritten/bigquery-storage/.kokoro/test.bat deleted file mode 100644 index ae59e59be3e..00000000000 --- a/handwritten/bigquery-storage/.kokoro/test.bat +++ /dev/null @@ -1,33 +0,0 @@ -@rem Copyright 2018 Google LLC. All rights reserved. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem http://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. - -@echo "Starting Windows build" - -cd /d %~dp0 -cd .. 
- -@rem npm path is not currently set in our image, we should fix this next time -@rem we upgrade Node.js in the image: -SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm - -call nvm use v12.14.1 -call which node - -call npm install || goto :error -call npm run test || goto :error - -goto :EOF - -:error -exit /b 1 diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh deleted file mode 100755 index a5c7ac04cd3..00000000000 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -export NPM_CONFIG_PREFIX=${HOME}/.npm-global - -cd $(dirname $0)/.. - -npm install -# If tests are running against main branch, configure flakybot -# to open issues on failures: -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then - export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml - export MOCHA_REPORTER=xunit - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi -# Unit tests exercise the entire API surface, which may include -# deprecation warnings: -export MOCHA_THROW_DEPRECATION=false -npm test - -# codecov combines coverage across integration and unit tests. 
Include -# the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=12 -if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then - NYC_BIN=./node_modules/nyc/bin/nyc.js - if [ -f "$NYC_BIN" ]; then - $NYC_BIN report || true - fi - bash $KOKORO_GFILE_DIR/codecov.sh -else - echo "coverage is only reported for Node $COVERAGE_NODE" -fi diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md deleted file mode 100644 index eeb46ea09fd..00000000000 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ /dev/null @@ -1,223 +0,0 @@ -# Changelog - -## [3.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.1...v3.2.0) (2022-11-11) - - -### Features - -* Add location to WriteStream and add WriteStreamView support ([#295](https://github.com/googleapis/nodejs-bigquery-storage/issues/295)) ([ba3c5ef](https://github.com/googleapis/nodejs-bigquery-storage/commit/ba3c5ef05366b1e9a542b9b13fc0c7a25118b2a3)) - -## [3.1.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.0...v3.1.1) (2022-09-01) - - -### Bug Fixes - -* Allow passing gax instance to client constructor ([#292](https://github.com/googleapis/nodejs-bigquery-storage/issues/292)) ([89f953d](https://github.com/googleapis/nodejs-bigquery-storage/commit/89f953de24d774de076ce9aeef649122ab3d65a6)) -* Do not import the whole google-gax from proto JS ([#1553](https://github.com/googleapis/nodejs-bigquery-storage/issues/1553)) ([#291](https://github.com/googleapis/nodejs-bigquery-storage/issues/291)) ([507e378](https://github.com/googleapis/nodejs-bigquery-storage/commit/507e3780553fa339ffccbba9a8f9ac930d1e9c6d)) - -## [3.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.0.0...v3.1.0) (2022-08-23) - - -### Features - -* allow users to set Apache Avro output format options through avro_serialization_options param in TableReadOptions message 
([#284](https://github.com/googleapis/nodejs-bigquery-storage/issues/284)) ([99b8afc](https://github.com/googleapis/nodejs-bigquery-storage/commit/99b8afc3fb2aa1d47151b90924eab2016432034f)) - - -### Bug Fixes - -* better support for fallback mode ([#287](https://github.com/googleapis/nodejs-bigquery-storage/issues/287)) ([08b0bb2](https://github.com/googleapis/nodejs-bigquery-storage/commit/08b0bb2c300ce49a65121805ea674e9c56726a87)) -* change import long to require ([#289](https://github.com/googleapis/nodejs-bigquery-storage/issues/289)) ([63a3dc2](https://github.com/googleapis/nodejs-bigquery-storage/commit/63a3dc2bcbac775e8c41dd19248ef3cd4829c21f)) -* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-bigquery-storage/issues/1546)) ([#290](https://github.com/googleapis/nodejs-bigquery-storage/issues/290)) ([1436388](https://github.com/googleapis/nodejs-bigquery-storage/commit/143638862040327e89c74c87a7018e2342576a95)) - -## [3.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v2.8.0...v3.0.0) (2022-06-29) - - -### ⚠ BREAKING CHANGES - -* update library to use Node 12 (#272) - -### Features - -* Deprecate format specific `row_count` field in Read API ([#249](https://github.com/googleapis/nodejs-bigquery-storage/issues/249)) ([fb8acf1](https://github.com/googleapis/nodejs-bigquery-storage/commit/fb8acf1f4eab7823132159bcf5927c9eda6374e2)) - - -### Bug Fixes - -* fixes for dynamic routing and streaming descriptors ([#274](https://github.com/googleapis/nodejs-bigquery-storage/issues/274)) ([4271ea0](https://github.com/googleapis/nodejs-bigquery-storage/commit/4271ea0aaa98286696eb6822d0bef82a655a5811)) -* Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time ([#279](https://github.com/googleapis/nodejs-bigquery-storage/issues/279)) ([849cc23](https://github.com/googleapis/nodejs-bigquery-storage/commit/849cc237081e63a585264a62d49e9407d2f14450)) - - -### Build System - 
-* update library to use Node 12 ([#272](https://github.com/googleapis/nodejs-bigquery-storage/issues/272)) ([5e774e6](https://github.com/googleapis/nodejs-bigquery-storage/commit/5e774e614132f189362d56c502960d87200a11a0)) - -## [2.8.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.7.0...v2.8.0) (2021-12-30) - - -### Features - -* add write_mode support for BigQuery Storage Write API v1 ([#228](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/228)) ([18f3123](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/18f3123275716d49460f77cbbc1a4547412087d2)) - -## [2.7.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.2...v2.7.0) (2021-09-27) - - -### Features - -* add BigQuery Storage Write API v1 ([#209](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/209)) ([e0401d9](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e0401d96480cd192a2fad8075884d2a8abd417ca)) - -### [2.6.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.1...v2.6.2) (2021-09-07) - - -### Bug Fixes - -* **deps:** update dependency snappy to v7 ([#196](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/196)) ([37538ec](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/37538ec42815d0ce325416b4ee299ca3fb7b59fe)) - -### [2.6.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.0...v2.6.1) (2021-09-03) - - -### Bug Fixes - -* **build:** migrate to main branch ([#204](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/204)) ([759c9f0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/759c9f0442f9cec7eec94055da87b17ba7ef18ad)) - -## [2.6.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.4...v2.6.0) (2021-08-23) - - -### Features - -* turns on self-signed JWT feature flag ([#200](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/200)) 
([ef2206c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ef2206cf1087c23d144fbc4b50363efb4c6deab2)) - -### [2.5.4](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.3...v2.5.4) (2021-08-17) - - -### Bug Fixes - -* **deps:** google-gax v2.24.1 ([#198](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/198)) ([c6f70de](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/c6f70de43641ee7a00237884cf3f40bbf1bed502)) - -### [2.5.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.2...v2.5.3) (2021-07-21) - - -### Bug Fixes - -* **deps:** google-gax v2.17.1 ([#188](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/188)) ([e49f7ee](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e49f7ee0413948779842b3b9d4faf5addc4c4db6)) -* Updating WORKSPACE files to use the newest version of the Typescript generator. ([#190](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/190)) ([8649cc6](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/8649cc6ae0a4e6ae807ba9e5af438ca0ffc9592a)) - -### [2.5.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.1...v2.5.2) (2021-06-30) - - -### Bug Fixes - -* **deps:** google-gax v2.17.0 with mTLS ([#185](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/185)) ([1e9b856](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/1e9b8560cb3b60a60035c965ba1dabc24ad8f0c0)) - -### [2.5.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.0...v2.5.1) (2021-06-22) - - -### Bug Fixes - -* make request optional in all cases ([#179](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/179)) ([b0beaaa](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b0beaaa280e7599f75e0a439f4ecd4a9a6c059ad)) - -## [2.5.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.4.0...v2.5.0) (2021-06-07) - - -### Features - -* 
Add ZSTD compression as an option for Arrow. ([#165](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/165)) ([dc5a1d0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/dc5a1d00f92f09dafbf0d3b1a9bf5ea4b5c43103)) - - -### Bug Fixes - -* **deps:** require google-gax v2.12.0 ([#158](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/158)) ([3347edd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/3347edd3781d7a37ae6a50b0d6885365bc2e4b2f)) -* GoogleAdsError missing using generator version after 1.3.0 ([#171](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/171)) ([8504761](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/850476101d06f4c3f903fb10ebb6709c1a6ffa95)) -* use require() to load JSON protos ([#161](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/161)) ([a16129f](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/a16129f7a56882e3070fa79f29b8b6018e7cd651)) - -## [2.4.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.3.0...v2.4.0) (2021-04-20) - - -### Features - -* add a Arrow compression options (Only LZ4 for now). 
([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) -* Return schema on first ReadRowsResponse ([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) - -## [2.3.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.3...v2.3.0) (2021-01-09) - - -### Features - -* introduce style enumeration ([#135](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/135)) ([4a8f699](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/4a8f699472d67aae4300c458308c2fa4fa372592)) - -### [2.2.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.2...v2.2.3) (2020-11-25) - - -### Bug Fixes - -* **browser:** check for fetch on window ([d837dfc](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/d837dfc841cf3e77fbc2482dbabb149e2fc4f76a)) - -### [2.2.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.1...v2.2.2) (2020-11-07) - - -### Bug Fixes - -* do not modify options object, use defaultScopes ([#126](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/126)) ([6f8eb24](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6f8eb244b1b06a928641550b2390e03964a14981)) - -### [2.2.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.0...v2.2.1) (2020-07-09) - - -### Bug Fixes - -* typeo in nodejs .gitattribute ([#84](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/84)) ([ab36886](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ab36886171cc6d94f66587f715d23e8cd4603f32)) - -## [2.2.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.1.0...v2.2.0) (2020-06-19) - - -### Features - -* promote library to GA ([#75](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/75)) 
([7d7a67e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/7d7a67e9198e87cdcc4911d9505a121f1a1d9549)) - -## [2.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.0.0...v2.1.0) (2020-06-12) - - -### Features - -* **secrets:** begin migration to secret manager from keystore ([#70](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/70)) ([6513e8c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6513e8cf6195740b570b39fb645d8a1adafc0580)) - - -### Bug Fixes - -* handle fallback option properly ([#73](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/73)) ([ec6b88c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ec6b88cf87bf45e0f16935b8b27f15447aa385b9)) - -## [2.0.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.1.0...v2.0.0) (2020-05-18) - - -### ⚠ BREAKING CHANGES - -* The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM. 
- -### Features - -* add V1 client ([#28](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/28)) ([da10a33](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/da10a33ee30a6fa0b447ef16c8b755e3ac05a87c)) -* additional type annotation ([#64](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/64)) ([2d76c0e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2d76c0e16abedfaf106db063dc00f79e38166dad)) -* drop node8 support ([#39](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/39)) ([2f66ded](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2f66ded8db03f71d3f2b37a1d91e4f3f232d5eaf)) - - -### Bug Fixes - -* regen protos and tests ([#63](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/63)) ([6293832](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6293832961eedcdd57c24edc311f2c154781e34e)) -* remove eslint, update gax, fix generated protos, run the generator ([#49](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/49)) ([b5b9492](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b5b9492a0c4b86b868a2b33c5c350301db29cc65)) - -## [1.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.0.0...v1.1.0) (2020-03-06) - - -### Features - -* deferred client initialization ([#23](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/23)) ([4741719](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/474171928bbdd5d0fb2eab7be868317f88cd18eb)) - -## 1.0.0 (2020-02-29) - - -### ⚠ BREAKING CHANGES - -* initial generation of library (#1) - -### Features - -* export protos in src/index.ts ([68b922a](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/68b922a4c242a6ad2e360758ef0658ca8451b62f)) -* initial generation of library ([#1](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/1)) 
([bd42fbd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/bd42fbd45616adaf36cdf197d2b0f3c811025e39)) diff --git a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md deleted file mode 100644 index 2add2547a81..00000000000 --- a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,94 +0,0 @@ - -# Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, gender identity and expression, level of -experience, education, socio-economic status, nationality, personal appearance, -race, religion, or sexual identity and orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. 
- -Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, or to ban temporarily or permanently any -contributor for other behaviors that they deem inappropriate, threatening, -offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. Representation of a project may be -further defined and clarified by project maintainers. - -This Code of Conduct also applies outside the project spaces when the Project -Steward has a reasonable belief that an individual's behavior may have a -negative impact on the project or its community. - -## Conflict Resolution - -We do not believe that all conflict is bad; healthy debate and disagreement -often yield positive results. However, it is never okay to be disrespectful or -to engage in behavior that violates the project’s code of conduct. - -If you see someone violating the code of conduct, you are encouraged to address -the behavior directly with those involved. Many issues can be resolved quickly -and easily, and this gives people more control over the outcome of their -dispute. If you are unable to resolve the matter for any reason, or if the -behavior is threatening or harassing, report it. We are dedicated to providing -an environment where participants feel welcome and safe. - -Reports should be directed to *googleapis-stewards@google.com*, the -Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to -receive and address reported violations of the code of conduct. 
They will then -work with a committee consisting of representatives from the Open Source -Programs Office and the Google Open Source Strategy team. If for any reason you -are uncomfortable reaching out to the Project Steward, please email -opensource@google.com. - -We will investigate every complaint, but you may not receive a direct response. -We will use our discretion in determining when and how to follow up on reported -incidents, which may range from not taking action to permanent expulsion from -the project and project-sponsored spaces. We will notify the accused of the -report and provide them an opportunity to discuss it before any action is taken. -The identity of the reporter will be omitted from the details of the report -supplied to the accused. In potentially harmful situations, such as ongoing -harassment or threats to anyone's safety, we may take action without notice. - -## Attribution - -This Code of Conduct is adapted from the Contributor Covenant, version 1.4, -available at -https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/handwritten/bigquery-storage/CONTRIBUTING.md b/handwritten/bigquery-storage/CONTRIBUTING.md deleted file mode 100644 index 3281e44c984..00000000000 --- a/handwritten/bigquery-storage/CONTRIBUTING.md +++ /dev/null @@ -1,76 +0,0 @@ -# How to become a contributor and submit your own code - -**Table of contents** - -* [Contributor License Agreements](#contributor-license-agreements) -* [Contributing a patch](#contributing-a-patch) -* [Running the tests](#running-the-tests) -* [Releasing the library](#releasing-the-library) - -## Contributor License Agreements - -We'd love to accept your sample apps and patches! Before we can take them, we -have to jump a couple of legal hurdles. - -Please fill out either the individual or corporate Contributor License Agreement -(CLA). 
- - * If you are an individual writing original source code and you're sure you - own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). - * If you work for a company that wants to allow you to contribute your work, - then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). - -Follow either of the two links above to access the appropriate CLA and -instructions for how to sign and return it. Once we receive it, we'll be able to -accept your pull requests. - -## Contributing A Patch - -1. Submit an issue describing your proposed change to the repo in question. -1. The repo owner will respond to your issue promptly. -1. If your proposed change is accepted, and you haven't already done so, sign a - Contributor License Agreement (see details above). -1. Fork the desired repo, develop and test your code changes. -1. Ensure that your code adheres to the existing style in the code to which - you are contributing. -1. Ensure that your code has an appropriate set of tests which all pass. -1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. -1. Submit a pull request. - -### Before you begin - -1. [Select or create a Cloud Platform project][projects]. -1. [Enable billing for your project][billing]. -1. [Enable the Google BigQuery Storage API][enable_api]. -1. [Set up authentication with a service account][auth] so you can access the - API from your local workstation. - - -## Running the tests - -1. [Prepare your environment for Node.js setup][setup]. - -1. Install dependencies: - - npm install - -1. Run the tests: - - # Run unit tests. - npm test - - # Run sample integration tests. - npm run samples-test - - # Run all system tests. - npm run system-test - -1. 
Lint (and maybe fix) any changes: - - npm run fix - -[setup]: https://cloud.google.com/nodejs/docs/setup -[projects]: https://console.cloud.google.com/project -[billing]: https://support.google.com/cloud/answer/6293499#enable-billing -[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com -[auth]: https://cloud.google.com/docs/authentication/getting-started \ No newline at end of file diff --git a/handwritten/bigquery-storage/LICENSE b/handwritten/bigquery-storage/LICENSE deleted file mode 100644 index d6456956733..00000000000 --- a/handwritten/bigquery-storage/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index a83b819122b..94c64d45735 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -1,3 +1,5 @@ +**_THIS REPOSITORY IS DEPRECATED. ALL OF ITS CONTENT AND HISTORY HAS BEEN MOVED TO [GOOGLE-CLOUD-NODE](https://github.com/googleapis/google-cloud-node/tree/main/packages/google-cloud-bigquery-storage)_** + [//]: # "This README.md file is auto-generated, all changes to this file will be lost." [//]: # "To regenerate it, use `python -m synthtool`." 
Google Cloud Platform logo diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json deleted file mode 100644 index befd23c8633..00000000000 --- a/handwritten/bigquery-storage/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/handwritten/bigquery-storage/owlbot.py b/handwritten/bigquery-storage/owlbot.py index 44b15e78f18..49fce7a6459 100644 --- a/handwritten/bigquery-storage/owlbot.py +++ b/handwritten/bigquery-storage/owlbot.py @@ -1,20 +1,17 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # -# Licensed under the Apache License, Version 2.0 (the "License"); +# Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-"""This script is used to synthesize generated parts of this library.""" import synthtool.languages.node as node - -node.owlbot_main( - staging_excludes=['package.json', 'README.md', 'src/index.ts'], - templates_excludes=['src/index.ts'] -) +node.owlbot_main(templates_excludes=[ +'README.md' +]) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json deleted file mode 100644 index 2d6e2462757..00000000000 --- a/handwritten/bigquery-storage/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "@google-cloud/bigquery-storage", - "version": "3.2.0", - "description": "Client for the BigQuery Storage API", - "repository": "googleapis/nodejs-bigquery-storage", - "license": "Apache-2.0", - "author": "Google LLC", - "files": [ - "build/src", - "build/protos" - ], - "main": "build/src/index.js", - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "prelint": "cd samples; npm link ../; npm install", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test", - "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", - "precompile": "gts clean" - }, - "dependencies": { - "google-gax": "^3.5.2" - }, - "devDependencies": { - "@types/mocha": "^9.0.0", - "@types/node": "^18.0.0", - "@types/sinon": "^10.0.0", - "c8": "^7.1.0", - "gts": "^3.1.0", - "jsdoc": "^4.0.0", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.0", - "mocha": "^9.2.2", - "null-loader": "^4.0.0", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^9.0.0", - "typescript": "^4.8.3", - "webpack": "^5.0.0", - "webpack-cli": "^4.0.0" - }, - "engines": { - "node": ">=12.0.0" - } -} diff --git 
a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto deleted file mode 100644 index 1627fd12a0c..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto +++ /dev/null @@ -1,28 +0,0 @@ -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -import "google/protobuf/descriptor.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option java_multiple_files = true; -option java_outer_classname = "AnnotationsProto"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -extend google.protobuf.FieldOptions { - // Setting the column_name extension allows users to reference - // bigquery column independently of the field name in the protocol buffer - // message. - // - // The intended use of this annotation is to reference a destination column - // named using characters unavailable for protobuf field names (e.g. unicode - // characters). - // - // More details about BigQuery naming limitations can be found here: - // https://cloud.google.com/bigquery/docs/schemas#column_names - // - // This extension is currently experimental. - optional string column_name = 454943157; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto deleted file mode 100644 index 6d3f6080bf6..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "ArrowProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -// Arrow schema as specified in -// https://arrow.apache.org/docs/python/api/datatypes.html -// and serialized to bytes using IPC: -// https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc -// -// See code samples on how this message can be deserialized. -message ArrowSchema { - // IPC serialized Arrow schema. - bytes serialized_schema = 1; -} - -// Arrow RecordBatch. -message ArrowRecordBatch { - // IPC-serialized Arrow RecordBatch. - bytes serialized_record_batch = 1; - - // [Deprecated] The count of rows in `serialized_record_batch`. - // Please use the format-independent ReadRowsResponse.row_count instead. - int64 row_count = 2 [deprecated = true]; -} - -// Contains options specific to Arrow Serialization. -message ArrowSerializationOptions { - // Compression codec's supported by Arrow. - enum CompressionCodec { - // If unspecified no compression will be used. - COMPRESSION_UNSPECIFIED = 0; - - // LZ4 Frame (https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md) - LZ4_FRAME = 1; - - // Zstandard compression. 
- ZSTD = 2; - } - - // The compression codec to use for Arrow buffers in serialized record - // batches. - CompressionCodec buffer_compression = 2; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto deleted file mode 100644 index e1ecb667b61..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "AvroProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -// Avro schema. -message AvroSchema { - // Json serialized schema, as described at - // https://avro.apache.org/docs/1.8.1/spec.html. - string schema = 1; -} - -// Avro rows. -message AvroRows { - // Binary serialized rows in a block. - bytes serialized_binary_rows = 1; - - // [Deprecated] The count of rows in the returning block. - // Please use the format-independent ReadRowsResponse.row_count instead. 
- int64 row_count = 2 [deprecated = true]; -} - -// Contains options specific to Avro Serialization. -message AvroSerializationOptions { - // Enable displayName attribute in Avro schema. - // - // The Avro specification requires field names to be alphanumeric. By - // default, in cases when column names do not conform to these requirements - // (e.g. non-ascii unicode codepoints) and Avro is requested as an output - // format, the CreateReadSession call will fail. - // - // Setting this field to true, populates avro field names with a placeholder - // value and populates a "displayName" attribute for every avro field with the - // original column name. - bool enable_display_name_attribute = 1; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto deleted file mode 100644 index b3754acf7b3..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -import "google/protobuf/descriptor.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "ProtoBufProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -// ProtoSchema describes the schema of the serialized protocol buffer data rows. -message ProtoSchema { - // Descriptor for input message. The provided descriptor must be self - // contained, such that data rows sent can be fully decoded using only the - // single descriptor. For data rows that are compositions of multiple - // independent messages, this means the descriptor may need to be transformed - // to only use nested types: - // https://developers.google.com/protocol-buffers/docs/proto#nested - // - // For additional information for how proto types and values map onto BigQuery - // see: https://cloud.google.com/bigquery/docs/write-api#data_type_conversions - google.protobuf.DescriptorProto proto_descriptor = 1; -} - -message ProtoRows { - // A sequence of rows serialized as a Protocol Buffer. - // - // See https://developers.google.com/protocol-buffers/docs/overview for more - // information on deserializing this field. - repeated bytes serialized_rows = 1; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto deleted file mode 100644 index df602135b9d..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ /dev/null @@ -1,666 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/storage/v1/arrow.proto"; -import "google/cloud/bigquery/storage/v1/avro.proto"; -import "google/cloud/bigquery/storage/v1/protobuf.proto"; -import "google/cloud/bigquery/storage/v1/stream.proto"; -import "google/cloud/bigquery/storage/v1/table.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "google/rpc/status.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "StorageProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; -option (google.api.resource_definition) = { - type: "bigquery.googleapis.com/Table" - pattern: "projects/{project}/datasets/{dataset}/tables/{table}" -}; - -// BigQuery Read API. -// -// The Read API can be used to read data from BigQuery. -service BigQueryRead { - option (google.api.default_host) = "bigquerystorage.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a new read session. 
A read session divides the contents of a - // BigQuery table into one or more streams, which can then be used to read - // data from the table. The read session also specifies properties of the - // data to be read, such as a list of columns or a push-down filter describing - // the rows to be returned. - // - // A particular row can be read by at most one stream. When the caller has - // reached the end of each stream in the session, then all the data in the - // table has been read. - // - // Data is assigned to each stream such that roughly the same number of - // rows can be read from each stream. Because the server-side unit for - // assigning data is collections of rows, the API does not guarantee that - // each stream will return the same number or rows. Additionally, the - // limits are enforced based on the number of pre-filtered rows, so some - // filters can lead to lopsided assignments. - // - // Read sessions automatically expire 6 hours after they are created and do - // not require manual clean-up by the caller. - rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { - option (google.api.http) = { - post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" - body: "*" - }; - option (google.api.method_signature) = "parent,read_session,max_stream_count"; - } - - // Reads rows from the stream in the format prescribed by the ReadSession. - // Each response contains one or more table rows, up to a maximum of 100 MiB - // per response; read requests which attempt to read individual rows larger - // than 100 MiB will fail. - // - // Each request also returns a set of stream statistics reflecting the current - // state of the stream. 
- rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { - option (google.api.http) = { - get: "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" - }; - option (google.api.method_signature) = "read_stream,offset"; - } - - // Splits a given `ReadStream` into two `ReadStream` objects. These - // `ReadStream` objects are referred to as the primary and the residual - // streams of the split. The original `ReadStream` can still be read from in - // the same manner as before. Both of the returned `ReadStream` objects can - // also be read from, and the rows returned by both child streams will be - // the same as the rows read from the original stream. - // - // Moreover, the two child streams will be allocated back-to-back in the - // original `ReadStream`. Concretely, it is guaranteed that for streams - // original, primary, and residual, that original[0-j] = primary[0-j] and - // original[j-n] = residual[0-m] once the streams have been read to - // completion. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" - }; - } -} - -// BigQuery Write API. -// -// The Write API can be used to write data to BigQuery. -// -// For supplementary information about the Write API, see: -// https://cloud.google.com/bigquery/docs/write-api -service BigQueryWrite { - option (google.api.default_host) = "bigquerystorage.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/bigquery.insertdata," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a write stream to the given table. - // Additionally, every table has a special stream named '_default' - // to which data can be written. This stream doesn't need to be created using - // CreateWriteStream. It is a stream that can be used simultaneously by any - // number of clients. 
Data written to this stream is considered committed as - // soon as an acknowledgement is received. - rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/datasets/*/tables/*}" - body: "write_stream" - }; - option (google.api.method_signature) = "parent,write_stream"; - } - - // Appends data to the given stream. - // - // If `offset` is specified, the `offset` is checked against the end of - // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an - // attempt is made to append to an offset beyond the current end of the stream - // or `ALREADY_EXISTS` if user provides an `offset` that has already been - // written to. User can retry with adjusted offset within the same RPC - // connection. If `offset` is not specified, append happens at the end of the - // stream. - // - // The response contains an optional offset at which the append - // happened. No offset information will be returned for appends to a - // default stream. - // - // Responses are received in the same order in which requests are sent. - // There will be one response for each successful inserted request. Responses - // may optionally embed error information if the originating AppendRequest was - // not successfully processed. - // - // The specifics of when successfully appended data is made visible to the - // table are governed by the type of stream: - // - // * For COMMITTED streams (which includes the default stream), data is - // visible immediately upon successful append. - // - // * For BUFFERED streams, data is made visible via a subsequent `FlushRows` - // rpc which advances a cursor to a newer offset in the stream. - // - // * For PENDING streams, data is not made visible until the stream itself is - // finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly - // committed via the `BatchCommitWriteStreams` rpc. 
- rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) { - option (google.api.http) = { - post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "write_stream"; - } - - // Gets information about a write stream. - rpc GetWriteStream(GetWriteStreamRequest) returns (WriteStream) { - option (google.api.http) = { - post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Finalize a write stream so that no new data can be appended to the - // stream. Finalize is not supported on the '_default' stream. - rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) { - option (google.api.http) = { - post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Atomically commits a group of `PENDING` streams that belong to the same - // `parent` table. - // - // Streams must be finalized before commit and cannot be committed multiple - // times. Once a stream is committed, data in the stream becomes available - // for read operations. - rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/datasets/*/tables/*}" - }; - option (google.api.method_signature) = "parent"; - } - - // Flushes rows to a BUFFERED stream. - // - // If users are appending rows to BUFFERED stream, flush operation is - // required in order for the rows to become available for reading. A - // Flush operation flushes up to any previously flushed offset in a BUFFERED - // stream, to the offset specified in the request. - // - // Flush is not supported on the _default stream, since it is not BUFFERED. 
- rpc FlushRows(FlushRowsRequest) returns (FlushRowsResponse) { - option (google.api.http) = { - post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "write_stream"; - } -} - -// Request message for `CreateReadSession`. -message CreateReadSessionRequest { - // Required. The request project that owns the session, in the form of - // `projects/{project_id}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - } - ]; - - // Required. Session to be created. - ReadSession read_session = 2 [(google.api.field_behavior) = REQUIRED]; - - // Max initial number of streams. If unset or zero, the server will - // provide a value of streams so as to produce reasonable throughput. Must be - // non-negative. The number of streams may be lower than the requested number, - // depending on the amount parallelism that is reasonable for the table. - // There is a default system max limit of 1,000. - // - // This must be greater than or equal to preferred_min_stream_count. - // Typically, clients should either leave this unset to let the system to - // determine an upper bound OR set this a size for the maximum "units of work" - // it can gracefully handle. - int32 max_stream_count = 3; - - // The minimum preferred stream count. This parameter can be used to inform - // the service that there is a desired lower bound on the number of streams. - // This is typically a target parallelism of the client (e.g. a Spark - // cluster with N-workers would set this to a low multiple of N to ensure - // good cluster utilization). - // - // The system will make a best effort to provide at least this number of - // streams, but in some cases might provide less. - int32 preferred_min_stream_count = 4; -} - -// Request message for `ReadRows`. -message ReadRowsRequest { - // Required. Stream to read rows from. 
- string read_stream = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/ReadStream" - } - ]; - - // The offset requested must be less than the last row read from Read. - // Requesting a larger offset is undefined. If not specified, start reading - // from offset zero. - int64 offset = 2; -} - -// Information on if the current connection is being throttled. -message ThrottleState { - // How much this connection is being throttled. Zero means no throttling, - // 100 means fully throttled. - int32 throttle_percent = 1; -} - -// Estimated stream statistics for a given read Stream. -message StreamStats { - message Progress { - // The fraction of rows assigned to the stream that have been processed by - // the server so far, not including the rows in the current response - // message. - // - // This value, along with `at_response_end`, can be used to interpolate - // the progress made as the rows in the message are being processed using - // the following formula: `at_response_start + (at_response_end - - // at_response_start) * rows_processed_from_response / rows_in_response`. - // - // Note that if a filter is provided, the `at_response_end` value of the - // previous response may not necessarily be equal to the - // `at_response_start` value of the current response. - double at_response_start = 1; - - // Similar to `at_response_start`, except that this value includes the - // rows in the current response. - double at_response_end = 2; - } - - // Represents the progress of the current stream. - Progress progress = 2; -} - -// Response from calling `ReadRows` may include row data, progress and -// throttling information. -message ReadRowsResponse { - // Row data is returned in format specified during session creation. - oneof rows { - // Serialized row data in AVRO format. - AvroRows avro_rows = 3; - - // Serialized row data in Arrow RecordBatch format. 
- ArrowRecordBatch arrow_record_batch = 4; - } - - // Number of serialized rows in the rows block. - int64 row_count = 6; - - // Statistics for the stream. - StreamStats stats = 2; - - // Throttling state. If unset, the latest response still describes - // the current throttling status. - ThrottleState throttle_state = 5; - - // The schema for the read. If read_options.selected_fields is set, the - // schema may be different from the table schema as it will only contain - // the selected fields. This schema is equivalent to the one returned by - // CreateSession. This field is only populated in the first ReadRowsResponse - // RPC. - oneof schema { - // Output only. Avro schema. - AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Arrow schema. - ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - } -} - -// Request message for `SplitReadStream`. -message SplitReadStreamRequest { - // Required. Name of the stream to split. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/ReadStream" - } - ]; - - // A value in the range (0.0, 1.0) that specifies the fractional point at - // which the original stream should be split. The actual split point is - // evaluated on pre-filtered rows, so if a filter is provided, then there is - // no guarantee that the division of the rows between the new child streams - // will be proportional to this fractional value. Additionally, because the - // server-side unit for assigning data is collections of rows, this fraction - // will always map to a data storage boundary on the server side. - double fraction = 2; -} - -// Response message for `SplitReadStream`. -message SplitReadStreamResponse { - // Primary stream, which contains the beginning portion of - // |original_stream|. An empty value indicates that the original stream can no - // longer be split. 
- ReadStream primary_stream = 1; - - // Remainder stream, which contains the tail of |original_stream|. An empty - // value indicates that the original stream can no longer be split. - ReadStream remainder_stream = 2; -} - -// Request message for `CreateWriteStream`. -message CreateWriteStreamRequest { - // Required. Reference to the table to which the stream belongs, in the format - // of `projects/{project}/datasets/{dataset}/tables/{table}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } - ]; - - // Required. Stream to be created. - WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Request message for `AppendRows`. -// -// Due to the nature of AppendRows being a bidirectional streaming RPC, certain -// parts of the AppendRowsRequest need only be specified for the first request -// sent each time the gRPC network connection is opened/reopened. -// -// The size of a single AppendRowsRequest must be less than 10 MB in size. -// Requests larger than this return an error, typically `INVALID_ARGUMENT`. -message AppendRowsRequest { - // ProtoData contains the data rows and schema when constructing append - // requests. - message ProtoData { - // Proto schema used to serialize the data. This value only needs to be - // provided as part of the first request on a gRPC network connection, - // and will be ignored for subsequent requests on the connection. - ProtoSchema writer_schema = 1; - - // Serialized row data in protobuf message format. - // Currently, the backend expects the serialized rows to adhere to - // proto2 semantics when appending rows, particularly with respect to - // how default values are encoded. - ProtoRows rows = 2; - } - - // Required. The write_stream identifies the target of the append operation, and only - // needs to be specified as part of the first request on the gRPC connection. 
- // If provided for subsequent requests, it must match the value of the first - // request. - // - // For explicitly created write streams, the format is: - // - // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` - // - // For the special default stream, the format is: - // - // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. - string write_stream = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/WriteStream" - } - ]; - - // If present, the write is only performed if the next append offset is same - // as the provided value. If not present, the write is performed at the - // current end of stream. Specifying a value for this field is not allowed - // when calling AppendRows for the '_default' stream. - google.protobuf.Int64Value offset = 2; - - // Input rows. The `writer_schema` field must be specified at the initial - // request and currently, it will be ignored if specified in following - // requests. Following requests must have data in the same format as the - // initial request. - oneof rows { - // Rows in proto format. - ProtoData proto_rows = 4; - } - - // Id set by client to annotate its identity. Only initial request setting is - // respected. - string trace_id = 6; -} - -// Response message for `AppendRows`. -message AppendRowsResponse { - // AppendResult is returned for successful append requests. - message AppendResult { - // The row offset at which the last append occurred. The offset will not be - // set if appending using default streams. - google.protobuf.Int64Value offset = 1; - } - - oneof response { - // Result if the append is successful. - AppendResult append_result = 1; - - // Error returned when problems were encountered. If present, - // it indicates rows were not accepted into the system. - // Users can retry or continue with other append requests within the - // same connection. 
- // - // Additional information about error signalling: - // - // ALREADY_EXISTS: Happens when an append specified an offset, and the - // backend already has received data at this offset. Typically encountered - // in retry scenarios, and can be ignored. - // - // OUT_OF_RANGE: Returned when the specified offset in the stream is beyond - // the current end of the stream. - // - // INVALID_ARGUMENT: Indicates a malformed request or data. - // - // ABORTED: Request processing is aborted because of prior failures. The - // request can be retried if previous failure is addressed. - // - // INTERNAL: Indicates server side error(s) that can be retried. - google.rpc.Status error = 2; - } - - // If backend detects a schema update, pass it to user so that user can - // use it to input new type of message. It will be empty when no schema - // updates have occurred. - TableSchema updated_schema = 3; - - // If a request failed due to corrupted rows, no rows in the batch will be - // appended. The API will return row level error info, so that the caller can - // remove the bad rows and retry the request. - repeated RowError row_errors = 4; - - // The target of the append operation. Matches the write_stream in the - // corresponding request. - string write_stream = 5; -} - -// Request message for `GetWriteStreamRequest`. -message GetWriteStreamRequest { - // Required. Name of the stream to get, in the form of - // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/WriteStream" - } - ]; - - // Indicates whether to get full or partial view of the WriteStream. If - // not set, view returned will be basic. - WriteStreamView view = 3; -} - -// Request message for `BatchCommitWriteStreams`. -message BatchCommitWriteStreamsRequest { - // Required. 
Parent table that all the streams should belong to, in the form of - // `projects/{project}/datasets/{dataset}/tables/{table}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } - ]; - - // Required. The group of streams that will be committed atomically. - repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Response message for `BatchCommitWriteStreams`. -message BatchCommitWriteStreamsResponse { - // The time at which streams were committed in microseconds granularity. - // This field will only exist when there are no stream errors. - // **Note** if this field is not set, it means the commit was not successful. - google.protobuf.Timestamp commit_time = 1; - - // Stream level error if commit failed. Only streams with error will be in - // the list. - // If empty, there is no error and all streams are committed successfully. - // If non empty, certain streams have errors and ZERO stream is committed due - // to atomicity guarantee. - repeated StorageError stream_errors = 2; -} - -// Request message for invoking `FinalizeWriteStream`. -message FinalizeWriteStreamRequest { - // Required. Name of the stream to finalize, in the form of - // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/WriteStream" - } - ]; -} - -// Response message for `FinalizeWriteStream`. -message FinalizeWriteStreamResponse { - // Number of rows in the finalized stream. - int64 row_count = 1; -} - -// Request message for `FlushRows`. -message FlushRowsRequest { - // Required. The stream that is the target of the flush operation. 
- string write_stream = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerystorage.googleapis.com/WriteStream" - } - ]; - - // Ending offset of the flush operation. Rows before this offset(including - // this offset) will be flushed. - google.protobuf.Int64Value offset = 2; -} - -// Respond message for `FlushRows`. -message FlushRowsResponse { - // The rows before this offset (including this offset) are flushed. - int64 offset = 1; -} - -// Structured custom BigQuery Storage error message. The error can be attached -// as error details in the returned rpc Status. In particular, the use of error -// codes allows more structured error handling, and reduces the need to evaluate -// unstructured error text strings. -message StorageError { - // Error code for `StorageError`. - enum StorageErrorCode { - // Default error. - STORAGE_ERROR_CODE_UNSPECIFIED = 0; - - // Table is not found in the system. - TABLE_NOT_FOUND = 1; - - // Stream is already committed. - STREAM_ALREADY_COMMITTED = 2; - - // Stream is not found. - STREAM_NOT_FOUND = 3; - - // Invalid Stream type. - // For example, you try to commit a stream that is not pending. - INVALID_STREAM_TYPE = 4; - - // Invalid Stream state. - // For example, you try to commit a stream that is not finalized or is - // garbaged. - INVALID_STREAM_STATE = 5; - - // Stream is finalized. - STREAM_FINALIZED = 6; - - // There is a schema mismatch and it is caused by user schema has extra - // field than bigquery schema. - SCHEMA_MISMATCH_EXTRA_FIELDS = 7; - - // Offset already exists. - OFFSET_ALREADY_EXISTS = 8; - - // Offset out of range. - OFFSET_OUT_OF_RANGE = 9; - } - - // BigQuery Storage specific error code. - StorageErrorCode code = 1; - - // Name of the failed entity. - string entity = 2; - - // Message that describes the error. - string error_message = 3; -} - -// The message that presents row level error info in a request. -message RowError { - // Error code for `RowError`. 
- enum RowErrorCode { - // Default error. - ROW_ERROR_CODE_UNSPECIFIED = 0; - - // One or more fields in the row has errors. - FIELDS_ERROR = 1; - } - - // Index of the malformed row in the request. - int64 index = 1; - - // Structured error reason for a row error. - RowErrorCode code = 2; - - // Description of the issue encountered when processing the row. - string message = 3; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto deleted file mode 100644 index fe71adfa6b7..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ /dev/null @@ -1,286 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/storage/v1/arrow.proto"; -import "google/cloud/bigquery/storage/v1/avro.proto"; -import "google/cloud/bigquery/storage/v1/table.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "StreamProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -// Data format for input or output data. -enum DataFormat { - // Data format is unspecified. - DATA_FORMAT_UNSPECIFIED = 0; - - // Avro is a standard open source row based file format. - // See https://avro.apache.org/ for more details. - AVRO = 1; - - // Arrow is a standard open source column-based message format. - // See https://arrow.apache.org/ for more details. - ARROW = 2; -} - -// Information about the ReadSession. -message ReadSession { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/ReadSession" - pattern: "projects/{project}/locations/{location}/sessions/{session}" - }; - - // Additional attributes when reading a table. - message TableModifiers { - // The snapshot time of the table. If not set, interpreted as now. - google.protobuf.Timestamp snapshot_time = 1; - } - - // Options dictating how we read a table. - message TableReadOptions { - // Optional. The names of the fields in the table to be returned. If no - // field names are specified, then all fields in the table are returned. - // - // Nested fields -- the child elements of a STRUCT field -- can be selected - // individually using their fully-qualified names, and will be returned as - // record fields containing only the selected nested fields. 
If a STRUCT - // field is specified in the selected fields list, all of the child elements - // will be returned. - // - // As an example, consider a table with the following schema: - // - // { - // "name": "struct_field", - // "type": "RECORD", - // "mode": "NULLABLE", - // "fields": [ - // { - // "name": "string_field1", - // "type": "STRING", - // . "mode": "NULLABLE" - // }, - // { - // "name": "string_field2", - // "type": "STRING", - // "mode": "NULLABLE" - // } - // ] - // } - // - // Specifying "struct_field" in the selected fields list will result in a - // read session schema with the following logical structure: - // - // struct_field { - // string_field1 - // string_field2 - // } - // - // Specifying "struct_field.string_field1" in the selected fields list will - // result in a read session schema with the following logical structure: - // - // struct_field { - // string_field1 - // } - // - // The order of the fields in the read session schema is derived from the - // table schema and does not correspond to the order in which the fields are - // specified in this list. - repeated string selected_fields = 1; - - // SQL text filtering statement, similar to a WHERE clause in a query. - // Aggregates are not supported. - // - // Examples: "int_field > 5" - // "date_field = CAST('2014-9-27' as DATE)" - // "nullable_field is not NULL" - // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" - // "numeric_field BETWEEN 1.0 AND 5.0" - // - // Restricted to a maximum length for 1 MB. - string row_restriction = 2; - - oneof output_format_serialization_options { - // Optional. Options specific to the Apache Arrow output format. - ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Options specific to the Apache Avro output format - AvroSerializationOptions avro_serialization_options = 4 [(google.api.field_behavior) = OPTIONAL]; - } - } - - // Output only. 
Unique identifier for the session, in the form - // `projects/{project_id}/locations/{location}/sessions/{session_id}`. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Time at which the session becomes invalid. After this time, subsequent - // requests to read this Session will return errors. The expire_time is - // automatically assigned and currently cannot be specified or updated. - google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported. - DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; - - // The schema for the read. If read_options.selected_fields is set, the - // schema may be different from the table schema as it will only contain - // the selected fields. - oneof schema { - // Output only. Avro schema. - AvroSchema avro_schema = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Arrow schema. - ArrowSchema arrow_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - } - - // Immutable. Table that this ReadSession is reading from, in the form - // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` - string table = 6 [ - (google.api.field_behavior) = IMMUTABLE, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } - ]; - - // Optional. Any modifiers which are applied when reading from the specified table. - TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Read options for this session (e.g. column selection, filters). - TableReadOptions read_options = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. A list of streams created with the session. - // - // At least one stream is created with the session. 
In the future, larger - // request_stream_count values *may* result in this list being unpopulated, - // in that case, the user will need to use a List method to get the streams - // instead, which is not yet available. - repeated ReadStream streams = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An estimate on the number of bytes this session will scan when - // all streams are completely consumed. This estimate is based on - // metadata from the table which might be incomplete or stale. - int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. ID set by client to annotate a session identity. This does not need - // to be strictly unique, but instead the same ID should be used to group - // logically connected sessions (e.g. All using the same ID for all sessions - // needed to complete a Spark SQL query is reasonable). - // - // Maximum length is 256 bytes. - string trace_id = 13 [(google.api.field_behavior) = OPTIONAL]; -} - -// Information about a single stream that gets data out of the storage system. -// Most of the information about `ReadStream` instances is aggregated, making -// `ReadStream` lightweight. -message ReadStream { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/ReadStream" - pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" - }; - - // Output only. Name of the stream, in the form - // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// WriteStreamView is a view enum that controls what details about a write -// stream should be returned. -enum WriteStreamView { - // The default / unset value. - WRITE_STREAM_VIEW_UNSPECIFIED = 0; - - // The BASIC projection returns basic metadata about a write stream. The - // basic view does not include schema information. 
This is the default view - // returned by GetWriteStream. - BASIC = 1; - - // The FULL projection returns all available write stream metadata, including - // the schema. CreateWriteStream returns the full projection of write stream - // metadata. - FULL = 2; -} - -// Information about a single stream that gets data inside the storage system. -message WriteStream { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/WriteStream" - pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" - }; - - // Type enum of the stream. - enum Type { - // Unknown type. - TYPE_UNSPECIFIED = 0; - - // Data will commit automatically and appear as soon as the write is - // acknowledged. - COMMITTED = 1; - - // Data is invisible until the stream is committed. - PENDING = 2; - - // Data is only visible up to the offset to which it was flushed. - BUFFERED = 3; - } - - // Mode enum of the stream. - enum WriteMode { - // Unknown type. - WRITE_MODE_UNSPECIFIED = 0; - - // Insert new records into the table. - // It is the default value if customers do not specify it. - INSERT = 1; - } - - // Output only. Name of the stream, in the form - // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Immutable. Type of the stream. - Type type = 2 [(google.api.field_behavior) = IMMUTABLE]; - - // Output only. Create time of the stream. For the _default stream, this is the - // creation_time of the table. - google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Commit time of the stream. - // If a stream is of `COMMITTED` type, then it will have a commit_time same as - // `create_time`. If the stream is of `PENDING` type, empty commit_time - // means it is not committed. - google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The schema of the destination table. 
It is only returned in - // `CreateWriteStream` response. Caller should generate data that's - // compatible with this schema to send in initial `AppendRowsRequest`. - // The table schema could go out of date during the life time of the stream. - TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Immutable. Mode of the stream. - WriteMode write_mode = 7 [(google.api.field_behavior) = IMMUTABLE]; - - // Immutable. The geographic location where the stream's dataset resides. See - // https://cloud.google.com/bigquery/docs/locations for supported - // locations. - string location = 8 [(google.api.field_behavior) = IMMUTABLE]; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto deleted file mode 100644 index fa4f840c580..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; -option java_multiple_files = true; -option java_outer_classname = "TableProto"; -option java_package = "com.google.cloud.bigquery.storage.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; - -// Schema of a table. This schema is a subset of -// google.cloud.bigquery.v2.TableSchema containing information necessary to -// generate valid message to write to BigQuery. -message TableSchema { - // Describes the fields in a table. - repeated TableFieldSchema fields = 1; -} - -// TableFieldSchema defines a single field/column within a table schema. -message TableFieldSchema { - enum Type { - // Illegal value - TYPE_UNSPECIFIED = 0; - - // 64K, UTF8 - STRING = 1; - - // 64-bit signed - INT64 = 2; - - // 64-bit IEEE floating point - DOUBLE = 3; - - // Aggregate type - STRUCT = 4; - - // 64K, Binary - BYTES = 5; - - // 2-valued - BOOL = 6; - - // 64-bit signed usec since UTC epoch - TIMESTAMP = 7; - - // Civil date - Year, Month, Day - DATE = 8; - - // Civil time - Hour, Minute, Second, Microseconds - TIME = 9; - - // Combination of civil date and civil time - DATETIME = 10; - - // Geography object - GEOGRAPHY = 11; - - // Numeric value - NUMERIC = 12; - - // BigNumeric value - BIGNUMERIC = 13; - - // Interval - INTERVAL = 14; - - // JSON, String - JSON = 15; - } - - enum Mode { - // Illegal value - MODE_UNSPECIFIED = 0; - - NULLABLE = 1; - - REQUIRED = 2; - - REPEATED = 3; - } - - // Required. The field name. The name must contain only letters (a-z, A-Z), - // numbers (0-9), or underscores (_), and must start with a letter or - // underscore. The maximum length is 128 characters. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The field data type. 
- Type type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The field mode. The default value is NULLABLE. - Mode mode = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Describes the nested schema fields if the type property is set to STRUCT. - repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The field description. The maximum length is 1,024 characters. - string description = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Maximum length of values of this field for STRINGS or BYTES. - // - // If max_length is not specified, no maximum length constraint is imposed - // on this field. - // - // If type = "STRING", then max_length represents the maximum UTF-8 - // length of strings in this field. - // - // If type = "BYTES", then max_length represents the maximum number of - // bytes in this field. - // - // It is invalid to set this field if type is not "STRING" or "BYTES". - int64 max_length = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Precision (maximum number of total digits in base 10) and scale - // (maximum number of digits in the fractional part in base 10) constraints - // for values of this field for NUMERIC or BIGNUMERIC. - // - // It is invalid to set precision or scale if type is not "NUMERIC" or - // "BIGNUMERIC". - // - // If precision and scale are not specified, no value range constraint is - // imposed on this field insofar as values are permitted by the type. - // - // Values of this NUMERIC or BIGNUMERIC field must be in this range when: - // - // * Precision (P) and scale (S) are specified: - // [-10^(P-S) + 10^(-S), 10^(P-S) - 10^(-S)] - // * Precision (P) is specified but not scale (and thus scale is - // interpreted to be equal to zero): - // [-10^P + 1, 10^P - 1]. - // - // Acceptable values for precision and scale if both are specified: - // - // * If type = "NUMERIC": - // 1 <= precision - scale <= 29 and 0 <= scale <= 9. 
- // * If type = "BIGNUMERIC": - // 1 <= precision - scale <= 38 and 0 <= scale <= 38. - // - // Acceptable values for precision if only precision is specified but not - // scale (and thus scale is interpreted to be equal to zero): - // - // * If type = "NUMERIC": 1 <= precision <= 29. - // * If type = "BIGNUMERIC": 1 <= precision <= 38. - // - // If scale is specified but not precision, then it is invalid. - int64 precision = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. See documentation for precision. - int64 scale = 9 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto deleted file mode 100644 index f70c61c7246..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "ArrowProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Arrow schema. -message ArrowSchema { - // IPC serialized Arrow schema. - bytes serialized_schema = 1; -} - -// Arrow RecordBatch. 
-message ArrowRecordBatch { - // IPC serialized Arrow RecordBatch. - bytes serialized_record_batch = 1; - - // The count of rows in the returning block. - int64 row_count = 2; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto deleted file mode 100644 index 7d034a28a7e..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "AvroProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Avro schema. -message AvroSchema { - // Json serialized schema, as described at - // https://avro.apache.org/docs/1.8.1/spec.html - string schema = 1; -} - -// Avro rows. -message AvroRows { - // Binary serialized rows in a block. - bytes serialized_binary_rows = 1; - - // The count of rows in the returning block. 
- int64 row_count = 2; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto deleted file mode 100644 index 1ff8d8b5eb6..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Options dictating how we read a table. -message TableReadOptions { - // Optional. Names of the fields in the table that should be read. If empty, - // all fields will be read. If the specified field is a nested field, all the - // sub-fields in the field will be selected. The output field order is - // unrelated to the order of fields in selected_fields. - repeated string selected_fields = 1; - - // Optional. SQL text filtering statement, similar to a WHERE clause in - // a query. Aggregates are not supported. 
- // - // Examples: "int_field > 5" - // "date_field = CAST('2014-9-27' as DATE)" - // "nullable_field is not NULL" - // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" - // "numeric_field BETWEEN 1.0 AND 5.0" - string row_restriction = 2; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto deleted file mode 100644 index 0d311418a49..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ /dev/null @@ -1,405 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; -import "google/cloud/bigquery/storage/v1beta1/avro.proto"; -import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; -import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// BigQuery storage API. 
-// -// The BigQuery storage API can be used to read data stored in BigQuery. -service BigQueryStorage { - option (google.api.default_host) = "bigquerystorage.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/bigquery," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a new read session. A read session divides the contents of a - // BigQuery table into one or more streams, which can then be used to read - // data from the table. The read session also specifies properties of the - // data to be read, such as a list of columns or a push-down filter describing - // the rows to be returned. - // - // A particular row can be read by at most one stream. When the caller has - // reached the end of each stream in the session, then all the data in the - // table has been read. - // - // Read sessions automatically expire 24 hours after they are created and do - // not require manual clean-up by the caller. - rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { - option (google.api.http) = { - post: "/v1beta1/{table_reference.project_id=projects/*}" - body: "*" - additional_bindings { - post: "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}" - body: "*" - } - }; - option (google.api.method_signature) = "table_reference,parent,requested_streams"; - } - - // Reads rows from the table in the format prescribed by the read session. - // Each response contains one or more table rows, up to a maximum of 10 MiB - // per response; read requests which attempt to read individual rows larger - // than this will fail. - // - // Each request also returns a set of stream statistics reflecting the - // estimated total number of rows in the read stream. This number is computed - // based on the total table size and the number of active streams in the read - // session, and may change as other streams continue to read data. 
- rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { - option (google.api.http) = { - get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" - }; - option (google.api.method_signature) = "read_position"; - } - - // Creates additional streams for a ReadSession. This API can be used to - // dynamically adjust the parallelism of a batch processing task upwards by - // adding additional workers. - rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) { - option (google.api.http) = { - post: "/v1beta1/{session.name=projects/*/sessions/*}" - body: "*" - }; - option (google.api.method_signature) = "session,requested_streams"; - } - - // Triggers the graceful termination of a single stream in a ReadSession. This - // API can be used to dynamically adjust the parallelism of a batch processing - // task downwards without losing data. - // - // This API does not delete the stream -- it remains visible in the - // ReadSession, and any data processed by the stream is not released to other - // streams. However, no additional data will be assigned to the stream once - // this call completes. Callers must continue reading data on the stream until - // the end of the stream is reached so that data which has already been - // assigned to the stream will be processed. - // - // This method will return an error if there are no other live streams - // in the Session, or if SplitReadStream() has been called on the given - // Stream. - rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1beta1/{stream.name=projects/*/streams/*}" - body: "*" - }; - option (google.api.method_signature) = "stream"; - } - - // Splits a given read stream into two Streams. These streams are referred to - // as the primary and the residual of the split. The original stream can still - // be read from in the same manner as before. 
Both of the returned streams can - // also be read from, and the total rows return by both child streams will be - // the same as the rows read from the original stream. - // - // Moreover, the two child streams will be allocated back to back in the - // original Stream. Concretely, it is guaranteed that for streams Original, - // Primary, and Residual, that Original[0-j] = Primary[0-j] and - // Original[j-n] = Residual[0-m] once the streams have been read to - // completion. - // - // This method is guaranteed to be idempotent. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { - option (google.api.http) = { - get: "/v1beta1/{original_stream.name=projects/*/streams/*}" - }; - option (google.api.method_signature) = "original_stream"; - } -} - -// Information about a single data stream within a read session. -message Stream { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/Stream" - pattern: "projects/{project}/locations/{location}/streams/{stream}" - }; - - // Name of the stream, in the form - // `projects/{project_id}/locations/{location}/streams/{stream_id}`. - string name = 1; -} - -// Expresses a point within a given stream using an offset position. -message StreamPosition { - // Identifier for a given Stream. - Stream stream = 1; - - // Position in the stream. - int64 offset = 2; -} - -// Information returned from a `CreateReadSession` request. -message ReadSession { - option (google.api.resource) = { - type: "bigquerystorage.googleapis.com/ReadSession" - pattern: "projects/{project}/locations/{location}/sessions/{session}" - }; - - // Unique identifier for the session, in the form - // `projects/{project_id}/locations/{location}/sessions/{session_id}`. - string name = 1; - - // Time at which the session becomes invalid. After this time, subsequent - // requests to read this Session will return errors. - google.protobuf.Timestamp expire_time = 2; - - // The schema for the read. 
If read_options.selected_fields is set, the - // schema may be different from the table schema as it will only contain - // the selected fields. - oneof schema { - // Avro schema. - AvroSchema avro_schema = 5; - - // Arrow schema. - ArrowSchema arrow_schema = 6; - } - - // Streams associated with this session. - repeated Stream streams = 4; - - // Table that this ReadSession is reading from. - TableReference table_reference = 7; - - // Any modifiers which are applied when reading from the specified table. - TableModifiers table_modifiers = 8; - - // The strategy to use for distributing data among the streams. - ShardingStrategy sharding_strategy = 9; -} - -// Creates a new read session, which may include additional options such as -// requested parallelism, projection filters and constraints. -message CreateReadSessionRequest { - // Required. Reference to the table to read. - TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. String of the form `projects/{project_id}` indicating the - // project this ReadSession is associated with. This is the project that will - // be billed for usage. - string parent = 6 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - } - ]; - - // Any modifiers to the Table (e.g. snapshot timestamp). - TableModifiers table_modifiers = 2; - - // Initial number of streams. If unset or 0, we will - // provide a value of streams so as to produce reasonable throughput. Must be - // non-negative. The number of streams may be lower than the requested number, - // depending on the amount parallelism that is reasonable for the table and - // the maximum amount of parallelism allowed by the system. - // - // Streams must be read starting from offset 0. - int32 requested_streams = 3; - - // Read options for this session (e.g. column selection, filters). - TableReadOptions read_options = 4; - - // Data output format. 
Currently default to Avro. - DataFormat format = 5; - - // The strategy to use for distributing data among multiple streams. Currently - // defaults to liquid sharding. - ShardingStrategy sharding_strategy = 7; -} - -// Data format for input or output data. -enum DataFormat { - // Data format is unspecified. - DATA_FORMAT_UNSPECIFIED = 0; - - // Avro is a standard open source row based file format. - // See https://avro.apache.org/ for more details. - AVRO = 1; - - ARROW = 3; -} - -// Strategy for distributing data among multiple streams in a read session. -enum ShardingStrategy { - // Same as LIQUID. - SHARDING_STRATEGY_UNSPECIFIED = 0; - - // Assigns data to each stream based on the client's read rate. The faster the - // client reads from a stream, the more data is assigned to the stream. In - // this strategy, it's possible to read all data from a single stream even if - // there are other streams present. - LIQUID = 1; - - // Assigns data to each stream such that roughly the same number of rows can - // be read from each stream. Because the server-side unit for assigning data - // is collections of rows, the API does not guarantee that each stream will - // return the same number or rows. Additionally, the limits are enforced based - // on the number of pre-filtering rows, so some filters can lead to lopsided - // assignments. - BALANCED = 2; -} - -// Requesting row data via `ReadRows` must provide Stream position information. -message ReadRowsRequest { - // Required. Identifier of the position in the stream to start reading from. - // The offset requested must be less than the last row read from ReadRows. - // Requesting a larger offset is undefined. - StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// Progress information for a given Stream. -message StreamStatus { - // Number of estimated rows in the current stream. 
May change over time as - // different readers in the stream progress at rates which are relatively fast - // or slow. - int64 estimated_row_count = 1; - - // A value in the range [0.0, 1.0] that represents the fraction of rows - // assigned to this stream that have been processed by the server. In the - // presence of read filters, the server may process more rows than it returns, - // so this value reflects progress through the pre-filtering rows. - // - // This value is only populated for sessions created through the BALANCED - // sharding strategy. - float fraction_consumed = 2; - - // Represents the progress of the current stream. - Progress progress = 4; - - // Whether this stream can be split. For sessions that use the LIQUID sharding - // strategy, this value is always false. For BALANCED sessions, this value is - // false when enough data have been read such that no more splits are possible - // at that point or beyond. For small tables or streams that are the result of - // a chain of splits, this value may never be true. - bool is_splittable = 3; -} - -message Progress { - // The fraction of rows assigned to the stream that have been processed by the - // server so far, not including the rows in the current response message. - // - // This value, along with `at_response_end`, can be used to interpolate the - // progress made as the rows in the message are being processed using the - // following formula: `at_response_start + (at_response_end - - // at_response_start) * rows_processed_from_response / rows_in_response`. - // - // Note that if a filter is provided, the `at_response_end` value of the - // previous response may not necessarily be equal to the `at_response_start` - // value of the current response. - float at_response_start = 1; - - // Similar to `at_response_start`, except that this value includes the rows in - // the current response. - float at_response_end = 2; -} - -// Information on if the current connection is being throttled. 
-message ThrottleStatus { - // How much this connection is being throttled. - // 0 is no throttling, 100 is completely throttled. - int32 throttle_percent = 1; -} - -// Response from calling `ReadRows` may include row data, progress and -// throttling information. -message ReadRowsResponse { - // Row data is returned in format specified during session creation. - oneof rows { - // Serialized row data in AVRO format. - AvroRows avro_rows = 3; - - // Serialized row data in Arrow RecordBatch format. - ArrowRecordBatch arrow_record_batch = 4; - } - - // Number of serialized rows in the rows block. This value is recorded here, - // in addition to the row_count values in the output-specific messages in - // `rows`, so that code which needs to record progress through the stream can - // do so in an output format-independent way. - int64 row_count = 6; - - // Estimated stream statistics. - StreamStatus status = 2; - - // Throttling status. If unset, the latest response still describes - // the current throttling status. - ThrottleStatus throttle_status = 5; -} - -// Information needed to request additional streams for an established read -// session. -message BatchCreateReadSessionStreamsRequest { - // Required. Must be a non-expired session obtained from a call to - // CreateReadSession. Only the name field needs to be set. - ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Number of new streams requested. Must be positive. - // Number of added streams may be less than this, see CreateReadSessionRequest - // for more information. - int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The response from `BatchCreateReadSessionStreams` returns the stream -// identifiers for the newly created streams. -message BatchCreateReadSessionStreamsResponse { - // Newly added streams. - repeated Stream streams = 1; -} - -// Request information for invoking `FinalizeStream`. -message FinalizeStreamRequest { - // Required. 
Stream to finalize. - Stream stream = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Request information for `SplitReadStream`. -message SplitReadStreamRequest { - // Required. Stream to split. - Stream original_stream = 1 [(google.api.field_behavior) = REQUIRED]; - - // A value in the range (0.0, 1.0) that specifies the fractional point at - // which the original stream should be split. The actual split point is - // evaluated on pre-filtered rows, so if a filter is provided, then there is - // no guarantee that the division of the rows between the new child streams - // will be proportional to this fractional value. Additionally, because the - // server-side unit for assigning data is collections of rows, this fraction - // will always map to to a data storage boundary on the server side. - float fraction = 2; -} - -// Response from `SplitReadStream`. -message SplitReadStreamResponse { - // Primary stream, which contains the beginning portion of - // |original_stream|. An empty value indicates that the original stream can no - // longer be split. - Stream primary_stream = 1; - - // Remainder stream, which contains the tail of |original_stream|. An empty - // value indicates that the original stream can no longer be split. - Stream remainder_stream = 2; -} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto deleted file mode 100644 index 22c940c0e6b..00000000000 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.storage.v1beta1; - -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; -option java_outer_classname = "TableReferenceProto"; -option java_package = "com.google.cloud.bigquery.storage.v1beta1"; - -// Table reference that includes just the 3 strings needed to identify a table. -message TableReference { - // The assigned project ID of the project. - string project_id = 1; - - // The ID of the dataset in the above project. - string dataset_id = 2; - - // The ID of the table in the above dataset. - string table_id = 3; -} - -// All fields in this message optional. -message TableModifiers { - // The snapshot time of the table. If not set, interpreted as now. - google.protobuf.Timestamp snapshot_time = 1; -} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts deleted file mode 100644 index 7d4eae00205..00000000000 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ /dev/null @@ -1,11968 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import type {protobuf as $protobuf} from "google-gax"; -import Long = require("long"); -/** Namespace google. */ -export namespace google { - - /** Namespace cloud. */ - namespace cloud { - - /** Namespace bigquery. */ - namespace bigquery { - - /** Namespace storage. */ - namespace storage { - - /** Namespace v1. */ - namespace v1 { - - /** Properties of an ArrowSchema. */ - interface IArrowSchema { - - /** ArrowSchema serializedSchema */ - serializedSchema?: (Uint8Array|string|null); - } - - /** Represents an ArrowSchema. */ - class ArrowSchema implements IArrowSchema { - - /** - * Constructs a new ArrowSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSchema); - - /** ArrowSchema serializedSchema. */ - public serializedSchema: (Uint8Array|string); - - /** - * Creates a new ArrowSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns ArrowSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSchema): google.cloud.bigquery.storage.v1.ArrowSchema; - - /** - * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. 
- * @param message ArrowSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. - * @param message ArrowSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSchema; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSchema; - - /** - * Verifies an ArrowSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ArrowSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSchema; - - /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. - * @param message ArrowSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ArrowSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ArrowSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an ArrowRecordBatch. */ - interface IArrowRecordBatch { - - /** ArrowRecordBatch serializedRecordBatch */ - serializedRecordBatch?: (Uint8Array|string|null); - - /** ArrowRecordBatch rowCount */ - rowCount?: (number|Long|string|null); - } - - /** Represents an ArrowRecordBatch. */ - class ArrowRecordBatch implements IArrowRecordBatch { - - /** - * Constructs a new ArrowRecordBatch. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch); - - /** ArrowRecordBatch serializedRecordBatch. */ - public serializedRecordBatch: (Uint8Array|string); - - /** ArrowRecordBatch rowCount. */ - public rowCount: (number|Long|string); - - /** - * Creates a new ArrowRecordBatch instance using the specified properties. - * @param [properties] Properties to set - * @returns ArrowRecordBatch instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch): google.cloud.bigquery.storage.v1.ArrowRecordBatch; - - /** - * Encodes the specified ArrowRecordBatch message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowRecordBatch; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowRecordBatch; - - /** - * Verifies an ArrowRecordBatch message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ArrowRecordBatch - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowRecordBatch; - - /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. - * @param message ArrowRecordBatch - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ArrowRecordBatch to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ArrowRecordBatch - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an ArrowSerializationOptions. */ - interface IArrowSerializationOptions { - - /** ArrowSerializationOptions bufferCompression */ - bufferCompression?: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null); - } - - /** Represents an ArrowSerializationOptions. */ - class ArrowSerializationOptions implements IArrowSerializationOptions { - - /** - * Constructs a new ArrowSerializationOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions); - - /** ArrowSerializationOptions bufferCompression. 
*/ - public bufferCompression: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec); - - /** - * Creates a new ArrowSerializationOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns ArrowSerializationOptions instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; - - /** - * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. - * @param message ArrowSerializationOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. - * @param message ArrowSerializationOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ArrowSerializationOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ArrowSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; - - /** - * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ArrowSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; - - /** - * Verifies an ArrowSerializationOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ArrowSerializationOptions - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; - - /** - * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. - * @param message ArrowSerializationOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ArrowSerializationOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ArrowSerializationOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace ArrowSerializationOptions { - - /** CompressionCodec enum. */ - enum CompressionCodec { - COMPRESSION_UNSPECIFIED = 0, - LZ4_FRAME = 1, - ZSTD = 2 - } - } - - /** Properties of an AvroSchema. */ - interface IAvroSchema { - - /** AvroSchema schema */ - schema?: (string|null); - } - - /** Represents an AvroSchema. */ - class AvroSchema implements IAvroSchema { - - /** - * Constructs a new AvroSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSchema); - - /** AvroSchema schema. */ - public schema: string; - - /** - * Creates a new AvroSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns AvroSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSchema): google.cloud.bigquery.storage.v1.AvroSchema; - - /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. - * @param message AvroSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. 
- * @param message AvroSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AvroSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSchema; - - /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSchema; - - /** - * Verifies an AvroSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AvroSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSchema; - - /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. 
- * @param message AvroSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AvroSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AvroSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an AvroRows. */ - interface IAvroRows { - - /** AvroRows serializedBinaryRows */ - serializedBinaryRows?: (Uint8Array|string|null); - - /** AvroRows rowCount */ - rowCount?: (number|Long|string|null); - } - - /** Represents an AvroRows. */ - class AvroRows implements IAvroRows { - - /** - * Constructs a new AvroRows. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IAvroRows); - - /** AvroRows serializedBinaryRows. */ - public serializedBinaryRows: (Uint8Array|string); - - /** AvroRows rowCount. */ - public rowCount: (number|Long|string); - - /** - * Creates a new AvroRows instance using the specified properties. - * @param [properties] Properties to set - * @returns AvroRows instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IAvroRows): google.cloud.bigquery.storage.v1.AvroRows; - - /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AvroRows message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AvroRows message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroRows; - - /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroRows; - - /** - * Verifies an AvroRows message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AvroRows - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroRows; - - /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
- * @param message AvroRows - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AvroRows to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AvroRows - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an AvroSerializationOptions. */ - interface IAvroSerializationOptions { - - /** AvroSerializationOptions enableDisplayNameAttribute */ - enableDisplayNameAttribute?: (boolean|null); - } - - /** Represents an AvroSerializationOptions. */ - class AvroSerializationOptions implements IAvroSerializationOptions { - - /** - * Constructs a new AvroSerializationOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions); - - /** AvroSerializationOptions enableDisplayNameAttribute. */ - public enableDisplayNameAttribute: boolean; - - /** - * Creates a new AvroSerializationOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns AvroSerializationOptions instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions): google.cloud.bigquery.storage.v1.AvroSerializationOptions; - - /** - * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. 
- * @param message AvroSerializationOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. - * @param message AvroSerializationOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AvroSerializationOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AvroSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSerializationOptions; - - /** - * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns AvroSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSerializationOptions; - - /** - * Verifies an AvroSerializationOptions message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AvroSerializationOptions - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSerializationOptions; - - /** - * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. - * @param message AvroSerializationOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AvroSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AvroSerializationOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AvroSerializationOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ProtoSchema. */ - interface IProtoSchema { - - /** ProtoSchema protoDescriptor */ - protoDescriptor?: (google.protobuf.IDescriptorProto|null); - } - - /** Represents a ProtoSchema. */ - class ProtoSchema implements IProtoSchema { - - /** - * Constructs a new ProtoSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IProtoSchema); - - /** ProtoSchema protoDescriptor. */ - public protoDescriptor?: (google.protobuf.IDescriptorProto|null); - - /** - * Creates a new ProtoSchema instance using the specified properties. 
- * @param [properties] Properties to set - * @returns ProtoSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IProtoSchema): google.cloud.bigquery.storage.v1.ProtoSchema; - - /** - * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. - * @param message ProtoSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. - * @param message ProtoSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ProtoSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ProtoSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoSchema; - - /** - * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns ProtoSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoSchema; - - /** - * Verifies a ProtoSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ProtoSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoSchema; - - /** - * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. - * @param message ProtoSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ProtoSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ProtoSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ProtoSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ProtoRows. */ - interface IProtoRows { - - /** ProtoRows serializedRows */ - serializedRows?: (Uint8Array[]|null); - } - - /** Represents a ProtoRows. */ - class ProtoRows implements IProtoRows { - - /** - * Constructs a new ProtoRows. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IProtoRows); - - /** ProtoRows serializedRows. 
*/ - public serializedRows: Uint8Array[]; - - /** - * Creates a new ProtoRows instance using the specified properties. - * @param [properties] Properties to set - * @returns ProtoRows instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IProtoRows): google.cloud.bigquery.storage.v1.ProtoRows; - - /** - * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. - * @param message ProtoRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. - * @param message ProtoRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ProtoRows message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ProtoRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoRows; - - /** - * Decodes a ProtoRows message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns ProtoRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoRows; - - /** - * Verifies a ProtoRows message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ProtoRows - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoRows; - - /** - * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. - * @param message ProtoRows - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ProtoRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ProtoRows to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ProtoRows - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Represents a BigQueryRead */ - class BigQueryRead extends $protobuf.rpc.Service { - - /** - * Constructs a new BigQueryRead service. 
- * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - */ - constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); - - /** - * Creates new BigQueryRead service using the specified rpc implementation. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - * @returns RPC service. Useful where requests and/or responses are streamed. - */ - public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryRead; - - /** - * Calls CreateReadSession. - * @param request CreateReadSessionRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadSession - */ - public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback): void; - - /** - * Calls CreateReadSession. - * @param request CreateReadSessionRequest message or plain object - * @returns Promise - */ - public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): Promise; - - /** - * Calls ReadRows. - * @param request ReadRowsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadRowsResponse - */ - public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback): void; - - /** - * Calls ReadRows. - * @param request ReadRowsRequest message or plain object - * @returns Promise - */ - public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest): Promise; - - /** - * Calls SplitReadStream. 
- * @param request SplitReadStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse - */ - public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback): void; - - /** - * Calls SplitReadStream. - * @param request SplitReadStreamRequest message or plain object - * @returns Promise - */ - public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): Promise; - } - - namespace BigQueryRead { - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. - * @param error Error, if any - * @param [response] ReadSession - */ - type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadSession) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. - * @param error Error, if any - * @param [response] ReadRowsResponse - */ - type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadRowsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. - * @param error Error, if any - * @param [response] SplitReadStreamResponse - */ - type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.SplitReadStreamResponse) => void; - } - - /** Represents a BigQueryWrite */ - class BigQueryWrite extends $protobuf.rpc.Service { - - /** - * Constructs a new BigQueryWrite service. 
- * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - */ - constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); - - /** - * Creates new BigQueryWrite service using the specified rpc implementation. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - * @returns RPC service. Useful where requests and/or responses are streamed. - */ - public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryWrite; - - /** - * Calls CreateWriteStream. - * @param request CreateWriteStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and WriteStream - */ - public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback): void; - - /** - * Calls CreateWriteStream. - * @param request CreateWriteStreamRequest message or plain object - * @returns Promise - */ - public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): Promise; - - /** - * Calls AppendRows. - * @param request AppendRowsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and AppendRowsResponse - */ - public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback): void; - - /** - * Calls AppendRows. - * @param request AppendRowsRequest message or plain object - * @returns Promise - */ - public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest): Promise; - - /** - * Calls GetWriteStream. 
- * @param request GetWriteStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and WriteStream - */ - public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback): void; - - /** - * Calls GetWriteStream. - * @param request GetWriteStreamRequest message or plain object - * @returns Promise - */ - public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): Promise; - - /** - * Calls FinalizeWriteStream. - * @param request FinalizeWriteStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse - */ - public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback): void; - - /** - * Calls FinalizeWriteStream. - * @param request FinalizeWriteStreamRequest message or plain object - * @returns Promise - */ - public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): Promise; - - /** - * Calls BatchCommitWriteStreams. - * @param request BatchCommitWriteStreamsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse - */ - public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback): void; - - /** - * Calls BatchCommitWriteStreams. - * @param request BatchCommitWriteStreamsRequest message or plain object - * @returns Promise - */ - public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): Promise; - - /** - * Calls FlushRows. 
- * @param request FlushRowsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and FlushRowsResponse - */ - public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback): void; - - /** - * Calls FlushRows. - * @param request FlushRowsRequest message or plain object - * @returns Promise - */ - public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest): Promise; - } - - namespace BigQueryWrite { - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. - * @param error Error, if any - * @param [response] WriteStream - */ - type CreateWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. - * @param error Error, if any - * @param [response] AppendRowsResponse - */ - type AppendRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.AppendRowsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. - * @param error Error, if any - * @param [response] WriteStream - */ - type GetWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. - * @param error Error, if any - * @param [response] FinalizeWriteStreamResponse - */ - type FinalizeWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. 
- * @param error Error, if any - * @param [response] BatchCommitWriteStreamsResponse - */ - type BatchCommitWriteStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. - * @param error Error, if any - * @param [response] FlushRowsResponse - */ - type FlushRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FlushRowsResponse) => void; - } - - /** Properties of a CreateReadSessionRequest. */ - interface ICreateReadSessionRequest { - - /** CreateReadSessionRequest parent */ - parent?: (string|null); - - /** CreateReadSessionRequest readSession */ - readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); - - /** CreateReadSessionRequest maxStreamCount */ - maxStreamCount?: (number|null); - - /** CreateReadSessionRequest preferredMinStreamCount */ - preferredMinStreamCount?: (number|null); - } - - /** Represents a CreateReadSessionRequest. */ - class CreateReadSessionRequest implements ICreateReadSessionRequest { - - /** - * Constructs a new CreateReadSessionRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest); - - /** CreateReadSessionRequest parent. */ - public parent: string; - - /** CreateReadSessionRequest readSession. */ - public readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); - - /** CreateReadSessionRequest maxStreamCount. */ - public maxStreamCount: number; - - /** CreateReadSessionRequest preferredMinStreamCount. */ - public preferredMinStreamCount: number; - - /** - * Creates a new CreateReadSessionRequest instance using the specified properties. 
- * @param [properties] Properties to set - * @returns CreateReadSessionRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; - - /** - * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. - * @param message CreateReadSessionRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. - * @param message CreateReadSessionRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; - - /** - * Verifies a CreateReadSessionRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns CreateReadSessionRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; - - /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. - * @param message CreateReadSessionRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this CreateReadSessionRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for CreateReadSessionRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ReadRowsRequest. */ - interface IReadRowsRequest { - - /** ReadRowsRequest readStream */ - readStream?: (string|null); - - /** ReadRowsRequest offset */ - offset?: (number|Long|string|null); - } - - /** Represents a ReadRowsRequest. 
*/ - class ReadRowsRequest implements IReadRowsRequest { - - /** - * Constructs a new ReadRowsRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest); - - /** ReadRowsRequest readStream. */ - public readStream: string; - - /** ReadRowsRequest offset. */ - public offset: (number|Long|string); - - /** - * Creates a new ReadRowsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadRowsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest): google.cloud.bigquery.storage.v1.ReadRowsRequest; - - /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsRequest; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsRequest; - - /** - * Verifies a ReadRowsRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReadRowsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsRequest; - - /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. - * @param message ReadRowsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadRowsRequest to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadRowsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ThrottleState. */ - interface IThrottleState { - - /** ThrottleState throttlePercent */ - throttlePercent?: (number|null); - } - - /** Represents a ThrottleState. */ - class ThrottleState implements IThrottleState { - - /** - * Constructs a new ThrottleState. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IThrottleState); - - /** ThrottleState throttlePercent. */ - public throttlePercent: number; - - /** - * Creates a new ThrottleState instance using the specified properties. - * @param [properties] Properties to set - * @returns ThrottleState instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IThrottleState): google.cloud.bigquery.storage.v1.ThrottleState; - - /** - * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. - * @param message ThrottleState message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. 
- * @param message ThrottleState message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ThrottleState message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ThrottleState - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ThrottleState; - - /** - * Decodes a ThrottleState message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ThrottleState - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ThrottleState; - - /** - * Verifies a ThrottleState message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ThrottleState - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ThrottleState; - - /** - * Creates a plain object from a ThrottleState message. Also converts values to other types if specified. 
- * @param message ThrottleState - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ThrottleState, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ThrottleState to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ThrottleState - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a StreamStats. */ - interface IStreamStats { - - /** StreamStats progress */ - progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); - } - - /** Represents a StreamStats. */ - class StreamStats implements IStreamStats { - - /** - * Constructs a new StreamStats. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IStreamStats); - - /** StreamStats progress. */ - public progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); - - /** - * Creates a new StreamStats instance using the specified properties. - * @param [properties] Properties to set - * @returns StreamStats instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IStreamStats): google.cloud.bigquery.storage.v1.StreamStats; - - /** - * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. - * @param message StreamStats message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StreamStats message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. - * @param message StreamStats message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StreamStats message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StreamStats - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats; - - /** - * Decodes a StreamStats message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns StreamStats - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats; - - /** - * Verifies a StreamStats message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns StreamStats - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats; - - /** - * Creates a plain object from a StreamStats message. Also converts values to other types if specified. 
- * @param message StreamStats - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StreamStats to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for StreamStats - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace StreamStats { - - /** Properties of a Progress. */ - interface IProgress { - - /** Progress atResponseStart */ - atResponseStart?: (number|null); - - /** Progress atResponseEnd */ - atResponseEnd?: (number|null); - } - - /** Represents a Progress. */ - class Progress implements IProgress { - - /** - * Constructs a new Progress. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress); - - /** Progress atResponseStart. */ - public atResponseStart: number; - - /** Progress atResponseEnd. */ - public atResponseEnd: number; - - /** - * Creates a new Progress instance using the specified properties. - * @param [properties] Properties to set - * @returns Progress instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress): google.cloud.bigquery.storage.v1.StreamStats.Progress; - - /** - * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. 
- * @param message Progress message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. - * @param message Progress message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Progress message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats.Progress; - - /** - * Decodes a Progress message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats.Progress; - - /** - * Verifies a Progress message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns Progress - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats.Progress; - - /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. - * @param message Progress - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Progress to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Progress - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of a ReadRowsResponse. */ - interface IReadRowsResponse { - - /** ReadRowsResponse avroRows */ - avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); - - /** ReadRowsResponse arrowRecordBatch */ - arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); - - /** ReadRowsResponse rowCount */ - rowCount?: (number|Long|string|null); - - /** ReadRowsResponse stats */ - stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); - - /** ReadRowsResponse throttleState */ - throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); - - /** ReadRowsResponse avroSchema */ - avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); - - /** ReadRowsResponse arrowSchema */ - arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - } - - /** Represents a ReadRowsResponse. */ - class ReadRowsResponse implements IReadRowsResponse { - - /** - * Constructs a new ReadRowsResponse. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse); - - /** ReadRowsResponse avroRows. */ - public avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); - - /** ReadRowsResponse arrowRecordBatch. */ - public arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); - - /** ReadRowsResponse rowCount. */ - public rowCount: (number|Long|string); - - /** ReadRowsResponse stats. */ - public stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); - - /** ReadRowsResponse throttleState. */ - public throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); - - /** ReadRowsResponse avroSchema. */ - public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); - - /** ReadRowsResponse arrowSchema. */ - public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - - /** ReadRowsResponse rows. */ - public rows?: ("avroRows"|"arrowRecordBatch"); - - /** ReadRowsResponse schema. */ - public schema?: ("avroSchema"|"arrowSchema"); - - /** - * Creates a new ReadRowsResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadRowsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse): google.cloud.bigquery.storage.v1.ReadRowsResponse; - - /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. - * @param message ReadRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
- * @param message ReadRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsResponse; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsResponse; - - /** - * Verifies a ReadRowsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReadRowsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsResponse; - - /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. 
- * @param message ReadRowsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadRowsResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadRowsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a SplitReadStreamRequest. */ - interface ISplitReadStreamRequest { - - /** SplitReadStreamRequest name */ - name?: (string|null); - - /** SplitReadStreamRequest fraction */ - fraction?: (number|null); - } - - /** Represents a SplitReadStreamRequest. */ - class SplitReadStreamRequest implements ISplitReadStreamRequest { - - /** - * Constructs a new SplitReadStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest); - - /** SplitReadStreamRequest name. */ - public name: string; - - /** SplitReadStreamRequest fraction. */ - public fraction: number; - - /** - * Creates a new SplitReadStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns SplitReadStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; - - /** - * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
- * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. - * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; - - /** - * Verifies a SplitReadStreamRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SplitReadStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; - - /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. - * @param message SplitReadStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SplitReadStreamRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SplitReadStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a SplitReadStreamResponse. */ - interface ISplitReadStreamResponse { - - /** SplitReadStreamResponse primaryStream */ - primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); - - /** SplitReadStreamResponse remainderStream */ - remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); - } - - /** Represents a SplitReadStreamResponse. */ - class SplitReadStreamResponse implements ISplitReadStreamResponse { - - /** - * Constructs a new SplitReadStreamResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse); - - /** SplitReadStreamResponse primaryStream. 
*/ - public primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); - - /** SplitReadStreamResponse remainderStream. */ - public remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); - - /** - * Creates a new SplitReadStreamResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns SplitReadStreamResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; - - /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. - * @param message SplitReadStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. - * @param message SplitReadStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; - - /** - * Verifies a SplitReadStreamResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SplitReadStreamResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; - - /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. - * @param message SplitReadStreamResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SplitReadStreamResponse to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SplitReadStreamResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a CreateWriteStreamRequest. */ - interface ICreateWriteStreamRequest { - - /** CreateWriteStreamRequest parent */ - parent?: (string|null); - - /** CreateWriteStreamRequest writeStream */ - writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); - } - - /** Represents a CreateWriteStreamRequest. */ - class CreateWriteStreamRequest implements ICreateWriteStreamRequest { - - /** - * Constructs a new CreateWriteStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest); - - /** CreateWriteStreamRequest parent. */ - public parent: string; - - /** CreateWriteStreamRequest writeStream. */ - public writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); - - /** - * Creates a new CreateWriteStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns CreateWriteStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - - /** - * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. 
- * @param message CreateWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. - * @param message CreateWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns CreateWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - - /** - * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns CreateWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - - /** - * Verifies a CreateWriteStreamRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns CreateWriteStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; - - /** - * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified. - * @param message CreateWriteStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this CreateWriteStreamRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for CreateWriteStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an AppendRowsRequest. */ - interface IAppendRowsRequest { - - /** AppendRowsRequest writeStream */ - writeStream?: (string|null); - - /** AppendRowsRequest offset */ - offset?: (google.protobuf.IInt64Value|null); - - /** AppendRowsRequest protoRows */ - protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); - - /** AppendRowsRequest traceId */ - traceId?: (string|null); - } - - /** Represents an AppendRowsRequest. */ - class AppendRowsRequest implements IAppendRowsRequest { - - /** - * Constructs a new AppendRowsRequest. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest); - - /** AppendRowsRequest writeStream. */ - public writeStream: string; - - /** AppendRowsRequest offset. */ - public offset?: (google.protobuf.IInt64Value|null); - - /** AppendRowsRequest protoRows. */ - public protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); - - /** AppendRowsRequest traceId. */ - public traceId: string; - - /** AppendRowsRequest rows. */ - public rows?: "protoRows"; - - /** - * Creates a new AppendRowsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns AppendRowsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest): google.cloud.bigquery.storage.v1.AppendRowsRequest; - - /** - * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. - * @param message AppendRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. - * @param message AppendRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AppendRowsRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AppendRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest; - - /** - * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns AppendRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest; - - /** - * Verifies an AppendRowsRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AppendRowsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest; - - /** - * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. - * @param message AppendRowsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AppendRowsRequest to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AppendRowsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace AppendRowsRequest { - - /** Properties of a ProtoData. */ - interface IProtoData { - - /** ProtoData writerSchema */ - writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); - - /** ProtoData rows */ - rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); - } - - /** Represents a ProtoData. */ - class ProtoData implements IProtoData { - - /** - * Constructs a new ProtoData. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData); - - /** ProtoData writerSchema. */ - public writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); - - /** ProtoData rows. */ - public rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); - - /** - * Creates a new ProtoData instance using the specified properties. - * @param [properties] Properties to set - * @returns ProtoData instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; - - /** - * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. - * @param message ProtoData message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ProtoData message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. - * @param message ProtoData message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ProtoData message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ProtoData - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; - - /** - * Decodes a ProtoData message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ProtoData - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; - - /** - * Verifies a ProtoData message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ProtoData - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; - - /** - * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
- * @param message ProtoData - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ProtoData to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ProtoData - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of an AppendRowsResponse. */ - interface IAppendRowsResponse { - - /** AppendRowsResponse appendResult */ - appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); - - /** AppendRowsResponse error */ - error?: (google.rpc.IStatus|null); - - /** AppendRowsResponse updatedSchema */ - updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); - - /** AppendRowsResponse rowErrors */ - rowErrors?: (google.cloud.bigquery.storage.v1.IRowError[]|null); - - /** AppendRowsResponse writeStream */ - writeStream?: (string|null); - } - - /** Represents an AppendRowsResponse. */ - class AppendRowsResponse implements IAppendRowsResponse { - - /** - * Constructs a new AppendRowsResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse); - - /** AppendRowsResponse appendResult. */ - public appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); - - /** AppendRowsResponse error. */ - public error?: (google.rpc.IStatus|null); - - /** AppendRowsResponse updatedSchema. */ - public updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); - - /** AppendRowsResponse rowErrors. */ - public rowErrors: google.cloud.bigquery.storage.v1.IRowError[]; - - /** AppendRowsResponse writeStream. 
*/ - public writeStream: string; - - /** AppendRowsResponse response. */ - public response?: ("appendResult"|"error"); - - /** - * Creates a new AppendRowsResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns AppendRowsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse): google.cloud.bigquery.storage.v1.AppendRowsResponse; - - /** - * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. - * @param message AppendRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. - * @param message AppendRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AppendRowsResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AppendRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse; - - /** - * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns AppendRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse; - - /** - * Verifies an AppendRowsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AppendRowsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse; - - /** - * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified. - * @param message AppendRowsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AppendRowsResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AppendRowsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace AppendRowsResponse { - - /** Properties of an AppendResult. */ - interface IAppendResult { - - /** AppendResult offset */ - offset?: (google.protobuf.IInt64Value|null); - } - - /** Represents an AppendResult. */ - class AppendResult implements IAppendResult { - - /** - * Constructs a new AppendResult. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult); - - /** AppendResult offset. */ - public offset?: (google.protobuf.IInt64Value|null); - - /** - * Creates a new AppendResult instance using the specified properties. - * @param [properties] Properties to set - * @returns AppendResult instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; - - /** - * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. - * @param message AppendResult message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. - * @param message AppendResult message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AppendResult message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AppendResult - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; - - /** - * Decodes an AppendResult message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns AppendResult - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; - - /** - * Verifies an AppendResult message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AppendResult - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; - - /** - * Creates a plain object from an AppendResult message. Also converts values to other types if specified. - * @param message AppendResult - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AppendResult to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AppendResult - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of a GetWriteStreamRequest. */ - interface IGetWriteStreamRequest { - - /** GetWriteStreamRequest name */ - name?: (string|null); - - /** GetWriteStreamRequest view */ - view?: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView|null); - } - - /** Represents a GetWriteStreamRequest. */ - class GetWriteStreamRequest implements IGetWriteStreamRequest { - - /** - * Constructs a new GetWriteStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest); - - /** GetWriteStreamRequest name. */ - public name: string; - - /** GetWriteStreamRequest view. */ - public view: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView); - - /** - * Creates a new GetWriteStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns GetWriteStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; - - /** - * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. 
- * @param message GetWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. - * @param message GetWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a GetWriteStreamRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns GetWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; - - /** - * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns GetWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; - - /** - * Verifies a GetWriteStreamRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns GetWriteStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; - - /** - * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. - * @param message GetWriteStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.GetWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this GetWriteStreamRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for GetWriteStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BatchCommitWriteStreamsRequest. */ - interface IBatchCommitWriteStreamsRequest { - - /** BatchCommitWriteStreamsRequest parent */ - parent?: (string|null); - - /** BatchCommitWriteStreamsRequest writeStreams */ - writeStreams?: (string[]|null); - } - - /** Represents a BatchCommitWriteStreamsRequest. */ - class BatchCommitWriteStreamsRequest implements IBatchCommitWriteStreamsRequest { - - /** - * Constructs a new BatchCommitWriteStreamsRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest); - - /** BatchCommitWriteStreamsRequest parent. 
*/ - public parent: string; - - /** BatchCommitWriteStreamsRequest writeStreams. */ - public writeStreams: string[]; - - /** - * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns BatchCommitWriteStreamsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; - - /** - * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. - * @param message BatchCommitWriteStreamsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. - * @param message BatchCommitWriteStreamsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BatchCommitWriteStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; - - /** - * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns BatchCommitWriteStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; - - /** - * Verifies a BatchCommitWriteStreamsRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BatchCommitWriteStreamsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; - - /** - * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. 
- * @param message BatchCommitWriteStreamsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BatchCommitWriteStreamsRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BatchCommitWriteStreamsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BatchCommitWriteStreamsResponse. */ - interface IBatchCommitWriteStreamsResponse { - - /** BatchCommitWriteStreamsResponse commitTime */ - commitTime?: (google.protobuf.ITimestamp|null); - - /** BatchCommitWriteStreamsResponse streamErrors */ - streamErrors?: (google.cloud.bigquery.storage.v1.IStorageError[]|null); - } - - /** Represents a BatchCommitWriteStreamsResponse. */ - class BatchCommitWriteStreamsResponse implements IBatchCommitWriteStreamsResponse { - - /** - * Constructs a new BatchCommitWriteStreamsResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse); - - /** BatchCommitWriteStreamsResponse commitTime. */ - public commitTime?: (google.protobuf.ITimestamp|null); - - /** BatchCommitWriteStreamsResponse streamErrors. */ - public streamErrors: google.cloud.bigquery.storage.v1.IStorageError[]; - - /** - * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. 
- * @param [properties] Properties to set - * @returns BatchCommitWriteStreamsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; - - /** - * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. - * @param message BatchCommitWriteStreamsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. - * @param message BatchCommitWriteStreamsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BatchCommitWriteStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; - - /** - * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns BatchCommitWriteStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; - - /** - * Verifies a BatchCommitWriteStreamsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BatchCommitWriteStreamsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; - - /** - * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. - * @param message BatchCommitWriteStreamsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BatchCommitWriteStreamsResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BatchCommitWriteStreamsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FinalizeWriteStreamRequest. 
*/ - interface IFinalizeWriteStreamRequest { - - /** FinalizeWriteStreamRequest name */ - name?: (string|null); - } - - /** Represents a FinalizeWriteStreamRequest. */ - class FinalizeWriteStreamRequest implements IFinalizeWriteStreamRequest { - - /** - * Constructs a new FinalizeWriteStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest); - - /** FinalizeWriteStreamRequest name. */ - public name: string; - - /** - * Creates a new FinalizeWriteStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns FinalizeWriteStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; - - /** - * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. - * @param message FinalizeWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. - * @param message FinalizeWriteStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FinalizeWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; - - /** - * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FinalizeWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; - - /** - * Verifies a FinalizeWriteStreamRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FinalizeWriteStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; - - /** - * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. - * @param message FinalizeWriteStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FinalizeWriteStreamRequest to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FinalizeWriteStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FinalizeWriteStreamResponse. */ - interface IFinalizeWriteStreamResponse { - - /** FinalizeWriteStreamResponse rowCount */ - rowCount?: (number|Long|string|null); - } - - /** Represents a FinalizeWriteStreamResponse. */ - class FinalizeWriteStreamResponse implements IFinalizeWriteStreamResponse { - - /** - * Constructs a new FinalizeWriteStreamResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse); - - /** FinalizeWriteStreamResponse rowCount. */ - public rowCount: (number|Long|string); - - /** - * Creates a new FinalizeWriteStreamResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns FinalizeWriteStreamResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; - - /** - * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. - * @param message FinalizeWriteStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
- * @param message FinalizeWriteStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FinalizeWriteStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; - - /** - * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FinalizeWriteStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; - - /** - * Verifies a FinalizeWriteStreamResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FinalizeWriteStreamResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; - - /** - * Creates a plain object from a FinalizeWriteStreamResponse message. 
Also converts values to other types if specified. - * @param message FinalizeWriteStreamResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FinalizeWriteStreamResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FinalizeWriteStreamResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FlushRowsRequest. */ - interface IFlushRowsRequest { - - /** FlushRowsRequest writeStream */ - writeStream?: (string|null); - - /** FlushRowsRequest offset */ - offset?: (google.protobuf.IInt64Value|null); - } - - /** Represents a FlushRowsRequest. */ - class FlushRowsRequest implements IFlushRowsRequest { - - /** - * Constructs a new FlushRowsRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest); - - /** FlushRowsRequest writeStream. */ - public writeStream: string; - - /** FlushRowsRequest offset. */ - public offset?: (google.protobuf.IInt64Value|null); - - /** - * Creates a new FlushRowsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns FlushRowsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest): google.cloud.bigquery.storage.v1.FlushRowsRequest; - - /** - * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. 
- * @param message FlushRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. - * @param message FlushRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FlushRowsRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FlushRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsRequest; - - /** - * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FlushRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsRequest; - - /** - * Verifies a FlushRowsRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FlushRowsRequest message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns FlushRowsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsRequest; - - /** - * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. - * @param message FlushRowsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FlushRowsRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FlushRowsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FlushRowsResponse. */ - interface IFlushRowsResponse { - - /** FlushRowsResponse offset */ - offset?: (number|Long|string|null); - } - - /** Represents a FlushRowsResponse. */ - class FlushRowsResponse implements IFlushRowsResponse { - - /** - * Constructs a new FlushRowsResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse); - - /** FlushRowsResponse offset. */ - public offset: (number|Long|string); - - /** - * Creates a new FlushRowsResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns FlushRowsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse): google.cloud.bigquery.storage.v1.FlushRowsResponse; - - /** - * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
- * @param message FlushRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. - * @param message FlushRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FlushRowsResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FlushRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsResponse; - - /** - * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FlushRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsResponse; - - /** - * Verifies a FlushRowsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FlushRowsResponse message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns FlushRowsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsResponse; - - /** - * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. - * @param message FlushRowsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FlushRowsResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FlushRowsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a StorageError. */ - interface IStorageError { - - /** StorageError code */ - code?: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null); - - /** StorageError entity */ - entity?: (string|null); - - /** StorageError errorMessage */ - errorMessage?: (string|null); - } - - /** Represents a StorageError. */ - class StorageError implements IStorageError { - - /** - * Constructs a new StorageError. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IStorageError); - - /** StorageError code. */ - public code: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode); - - /** StorageError entity. */ - public entity: string; - - /** StorageError errorMessage. 
*/ - public errorMessage: string; - - /** - * Creates a new StorageError instance using the specified properties. - * @param [properties] Properties to set - * @returns StorageError instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IStorageError): google.cloud.bigquery.storage.v1.StorageError; - - /** - * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. - * @param message StorageError message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. - * @param message StorageError message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StorageError message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StorageError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StorageError; - - /** - * Decodes a StorageError message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns StorageError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StorageError; - - /** - * Verifies a StorageError message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StorageError message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns StorageError - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StorageError; - - /** - * Creates a plain object from a StorageError message. Also converts values to other types if specified. - * @param message StorageError - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.StorageError, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StorageError to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for StorageError - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace StorageError { - - /** StorageErrorCode enum. */ - enum StorageErrorCode { - STORAGE_ERROR_CODE_UNSPECIFIED = 0, - TABLE_NOT_FOUND = 1, - STREAM_ALREADY_COMMITTED = 2, - STREAM_NOT_FOUND = 3, - INVALID_STREAM_TYPE = 4, - INVALID_STREAM_STATE = 5, - STREAM_FINALIZED = 6, - SCHEMA_MISMATCH_EXTRA_FIELDS = 7, - OFFSET_ALREADY_EXISTS = 8, - OFFSET_OUT_OF_RANGE = 9 - } - } - - /** Properties of a RowError. 
*/ - interface IRowError { - - /** RowError index */ - index?: (number|Long|string|null); - - /** RowError code */ - code?: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null); - - /** RowError message */ - message?: (string|null); - } - - /** Represents a RowError. */ - class RowError implements IRowError { - - /** - * Constructs a new RowError. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IRowError); - - /** RowError index. */ - public index: (number|Long|string); - - /** RowError code. */ - public code: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode); - - /** RowError message. */ - public message: string; - - /** - * Creates a new RowError instance using the specified properties. - * @param [properties] Properties to set - * @returns RowError instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IRowError): google.cloud.bigquery.storage.v1.RowError; - - /** - * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. - * @param message RowError message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. 
- * @param message RowError message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a RowError message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns RowError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.RowError; - - /** - * Decodes a RowError message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns RowError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.RowError; - - /** - * Verifies a RowError message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a RowError message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns RowError - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.RowError; - - /** - * Creates a plain object from a RowError message. Also converts values to other types if specified. 
- * @param message RowError - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.RowError, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this RowError to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for RowError - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace RowError { - - /** RowErrorCode enum. */ - enum RowErrorCode { - ROW_ERROR_CODE_UNSPECIFIED = 0, - FIELDS_ERROR = 1 - } - } - - /** DataFormat enum. */ - enum DataFormat { - DATA_FORMAT_UNSPECIFIED = 0, - AVRO = 1, - ARROW = 2 - } - - /** Properties of a ReadSession. */ - interface IReadSession { - - /** ReadSession name */ - name?: (string|null); - - /** ReadSession expireTime */ - expireTime?: (google.protobuf.ITimestamp|null); - - /** ReadSession dataFormat */ - dataFormat?: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat|null); - - /** ReadSession avroSchema */ - avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); - - /** ReadSession arrowSchema */ - arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - - /** ReadSession table */ - table?: (string|null); - - /** ReadSession tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); - - /** ReadSession readOptions */ - readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); - - /** ReadSession streams */ - streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); - - /** ReadSession estimatedTotalBytesScanned */ - estimatedTotalBytesScanned?: (number|Long|string|null); - - /** ReadSession traceId */ - traceId?: (string|null); - } - - /** Represents a 
ReadSession. */ - class ReadSession implements IReadSession { - - /** - * Constructs a new ReadSession. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadSession); - - /** ReadSession name. */ - public name: string; - - /** ReadSession expireTime. */ - public expireTime?: (google.protobuf.ITimestamp|null); - - /** ReadSession dataFormat. */ - public dataFormat: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat); - - /** ReadSession avroSchema. */ - public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); - - /** ReadSession arrowSchema. */ - public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); - - /** ReadSession table. */ - public table: string; - - /** ReadSession tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); - - /** ReadSession readOptions. */ - public readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); - - /** ReadSession streams. */ - public streams: google.cloud.bigquery.storage.v1.IReadStream[]; - - /** ReadSession estimatedTotalBytesScanned. */ - public estimatedTotalBytesScanned: (number|Long|string); - - /** ReadSession traceId. */ - public traceId: string; - - /** ReadSession schema. */ - public schema?: ("avroSchema"|"arrowSchema"); - - /** - * Creates a new ReadSession instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadSession instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadSession): google.cloud.bigquery.storage.v1.ReadSession; - - /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. 
- * @param message ReadSession message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. - * @param message ReadSession message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadSession message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession; - - /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession; - - /** - * Verifies a ReadSession message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ReadSession - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession; - - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @param message ReadSession - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadSession to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadSession - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace ReadSession { - - /** Properties of a TableModifiers. */ - interface ITableModifiers { - - /** TableModifiers snapshotTime */ - snapshotTime?: (google.protobuf.ITimestamp|null); - } - - /** Represents a TableModifiers. */ - class TableModifiers implements ITableModifiers { - - /** - * Constructs a new TableModifiers. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers); - - /** TableModifiers snapshotTime. */ - public snapshotTime?: (google.protobuf.ITimestamp|null); - - /** - * Creates a new TableModifiers instance using the specified properties. - * @param [properties] Properties to set - * @returns TableModifiers instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - - /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. 
- * @param message TableModifiers message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableModifiers message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - - /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - - /** - * Verifies a TableModifiers message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableModifiers message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns TableModifiers - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; - - /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. - * @param message TableModifiers - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableModifiers to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableModifiers - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a TableReadOptions. */ - interface ITableReadOptions { - - /** TableReadOptions selectedFields */ - selectedFields?: (string[]|null); - - /** TableReadOptions rowRestriction */ - rowRestriction?: (string|null); - - /** TableReadOptions arrowSerializationOptions */ - arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); - - /** TableReadOptions avroSerializationOptions */ - avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); - } - - /** Represents a TableReadOptions. */ - class TableReadOptions implements ITableReadOptions { - - /** - * Constructs a new TableReadOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions); - - /** TableReadOptions selectedFields. */ - public selectedFields: string[]; - - /** TableReadOptions rowRestriction. 
*/ - public rowRestriction: string; - - /** TableReadOptions arrowSerializationOptions. */ - public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); - - /** TableReadOptions avroSerializationOptions. */ - public avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); - - /** TableReadOptions outputFormatSerializationOptions. */ - public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); - - /** - * Creates a new TableReadOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns TableReadOptions instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; - - /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; - - /** - * Verifies a TableReadOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableReadOptions - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; - - /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. - * @param message TableReadOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableReadOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableReadOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of a ReadStream. */ - interface IReadStream { - - /** ReadStream name */ - name?: (string|null); - } - - /** Represents a ReadStream. */ - class ReadStream implements IReadStream { - - /** - * Constructs a new ReadStream. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IReadStream); - - /** ReadStream name. */ - public name: string; - - /** - * Creates a new ReadStream instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadStream instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IReadStream): google.cloud.bigquery.storage.v1.ReadStream; - - /** - * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @param message ReadStream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @param message ReadStream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadStream message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadStream; - - /** - * Decodes a ReadStream message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadStream; - - /** - * Verifies a ReadStream message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReadStream - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadStream; - - /** - * Creates a plain object from a ReadStream message. Also converts values to other types if specified. - * @param message ReadStream - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadStream to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadStream - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** WriteStreamView enum. */ - enum WriteStreamView { - WRITE_STREAM_VIEW_UNSPECIFIED = 0, - BASIC = 1, - FULL = 2 - } - - /** Properties of a WriteStream. */ - interface IWriteStream { - - /** WriteStream name */ - name?: (string|null); - - /** WriteStream type */ - type?: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type|null); - - /** WriteStream createTime */ - createTime?: (google.protobuf.ITimestamp|null); - - /** WriteStream commitTime */ - commitTime?: (google.protobuf.ITimestamp|null); - - /** WriteStream tableSchema */ - tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); - - /** WriteStream writeMode */ - writeMode?: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null); - - /** WriteStream location */ - location?: (string|null); - } - - /** Represents a WriteStream. */ - class WriteStream implements IWriteStream { - - /** - * Constructs a new WriteStream. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.IWriteStream); - - /** WriteStream name. */ - public name: string; - - /** WriteStream type. */ - public type: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type); - - /** WriteStream createTime. */ - public createTime?: (google.protobuf.ITimestamp|null); - - /** WriteStream commitTime. */ - public commitTime?: (google.protobuf.ITimestamp|null); - - /** WriteStream tableSchema. 
*/ - public tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); - - /** WriteStream writeMode. */ - public writeMode: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode); - - /** WriteStream location. */ - public location: string; - - /** - * Creates a new WriteStream instance using the specified properties. - * @param [properties] Properties to set - * @returns WriteStream instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.IWriteStream): google.cloud.bigquery.storage.v1.WriteStream; - - /** - * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. - * @param message WriteStream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. - * @param message WriteStream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a WriteStream message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns WriteStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.WriteStream; - - /** - * Decodes a WriteStream message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns WriteStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.WriteStream; - - /** - * Verifies a WriteStream message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns WriteStream - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.WriteStream; - - /** - * Creates a plain object from a WriteStream message. Also converts values to other types if specified. - * @param message WriteStream - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.WriteStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this WriteStream to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for WriteStream - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace WriteStream { - - /** Type enum. */ - enum Type { - TYPE_UNSPECIFIED = 0, - COMMITTED = 1, - PENDING = 2, - BUFFERED = 3 - } - - /** WriteMode enum. */ - enum WriteMode { - WRITE_MODE_UNSPECIFIED = 0, - INSERT = 1 - } - } - - /** Properties of a TableSchema. */ - interface ITableSchema { - - /** TableSchema fields */ - fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); - } - - /** Represents a TableSchema. */ - class TableSchema implements ITableSchema { - - /** - * Constructs a new TableSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ITableSchema); - - /** TableSchema fields. */ - public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; - - /** - * Creates a new TableSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns TableSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ITableSchema): google.cloud.bigquery.storage.v1.TableSchema; - - /** - * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. - * @param message TableSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
- * @param message TableSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableSchema; - - /** - * Decodes a TableSchema message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableSchema; - - /** - * Verifies a TableSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableSchema; - - /** - * Creates a plain object from a TableSchema message. Also converts values to other types if specified. 
- * @param message TableSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.TableSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a TableFieldSchema. */ - interface ITableFieldSchema { - - /** TableFieldSchema name */ - name?: (string|null); - - /** TableFieldSchema type */ - type?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null); - - /** TableFieldSchema mode */ - mode?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null); - - /** TableFieldSchema fields */ - fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); - - /** TableFieldSchema description */ - description?: (string|null); - - /** TableFieldSchema maxLength */ - maxLength?: (number|Long|string|null); - - /** TableFieldSchema precision */ - precision?: (number|Long|string|null); - - /** TableFieldSchema scale */ - scale?: (number|Long|string|null); - } - - /** Represents a TableFieldSchema. */ - class TableFieldSchema implements ITableFieldSchema { - - /** - * Constructs a new TableFieldSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema); - - /** TableFieldSchema name. */ - public name: string; - - /** TableFieldSchema type. 
*/ - public type: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type); - - /** TableFieldSchema mode. */ - public mode: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode); - - /** TableFieldSchema fields. */ - public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; - - /** TableFieldSchema description. */ - public description: string; - - /** TableFieldSchema maxLength. */ - public maxLength: (number|Long|string); - - /** TableFieldSchema precision. */ - public precision: (number|Long|string); - - /** TableFieldSchema scale. */ - public scale: (number|Long|string); - - /** - * Creates a new TableFieldSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns TableFieldSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema): google.cloud.bigquery.storage.v1.TableFieldSchema; - - /** - * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. - * @param message TableFieldSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. 
- * @param message TableFieldSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableFieldSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableFieldSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableFieldSchema; - - /** - * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableFieldSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableFieldSchema; - - /** - * Verifies a TableFieldSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableFieldSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableFieldSchema; - - /** - * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified. 
- * @param message TableFieldSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1.TableFieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableFieldSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableFieldSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace TableFieldSchema { - - /** Type enum. */ - enum Type { - TYPE_UNSPECIFIED = 0, - STRING = 1, - INT64 = 2, - DOUBLE = 3, - STRUCT = 4, - BYTES = 5, - BOOL = 6, - TIMESTAMP = 7, - DATE = 8, - TIME = 9, - DATETIME = 10, - GEOGRAPHY = 11, - NUMERIC = 12, - BIGNUMERIC = 13, - INTERVAL = 14, - JSON = 15 - } - - /** Mode enum. */ - enum Mode { - MODE_UNSPECIFIED = 0, - NULLABLE = 1, - REQUIRED = 2, - REPEATED = 3 - } - } - } - - /** Namespace v1beta1. */ - namespace v1beta1 { - - /** Properties of an ArrowSchema. */ - interface IArrowSchema { - - /** ArrowSchema serializedSchema */ - serializedSchema?: (Uint8Array|string|null); - } - - /** Represents an ArrowSchema. */ - class ArrowSchema implements IArrowSchema { - - /** - * Constructs a new ArrowSchema. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); - - /** ArrowSchema serializedSchema. */ - public serializedSchema: (Uint8Array|string); - - /** - * Creates a new ArrowSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns ArrowSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema): google.cloud.bigquery.storage.v1beta1.ArrowSchema; - - /** - * Encodes the specified ArrowSchema message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @param message ArrowSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @param message ArrowSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowSchema; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowSchema; - - /** - * Verifies an ArrowSchema message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ArrowSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowSchema; - - /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. - * @param message ArrowSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ArrowSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ArrowSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an ArrowRecordBatch. */ - interface IArrowRecordBatch { - - /** ArrowRecordBatch serializedRecordBatch */ - serializedRecordBatch?: (Uint8Array|string|null); - - /** ArrowRecordBatch rowCount */ - rowCount?: (number|Long|string|null); - } - - /** Represents an ArrowRecordBatch. */ - class ArrowRecordBatch implements IArrowRecordBatch { - - /** - * Constructs a new ArrowRecordBatch. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); - - /** ArrowRecordBatch serializedRecordBatch. */ - public serializedRecordBatch: (Uint8Array|string); - - /** ArrowRecordBatch rowCount. 
*/ - public rowCount: (number|Long|string); - - /** - * Creates a new ArrowRecordBatch instance using the specified properties. - * @param [properties] Properties to set - * @returns ArrowRecordBatch instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; - - /** - * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. - * @param message ArrowRecordBatch message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; - - /** - * Verifies an ArrowRecordBatch message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ArrowRecordBatch - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; - - /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. - * @param message ArrowRecordBatch - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ArrowRecordBatch to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ArrowRecordBatch - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an AvroSchema. */ - interface IAvroSchema { - - /** AvroSchema schema */ - schema?: (string|null); - } - - /** Represents an AvroSchema. */ - class AvroSchema implements IAvroSchema { - - /** - * Constructs a new AvroSchema. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema); - - /** AvroSchema schema. */ - public schema: string; - - /** - * Creates a new AvroSchema instance using the specified properties. - * @param [properties] Properties to set - * @returns AvroSchema instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema): google.cloud.bigquery.storage.v1beta1.AvroSchema; - - /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. - * @param message AvroSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. - * @param message AvroSchema message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AvroSchema message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroSchema; - - /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroSchema; - - /** - * Verifies an AvroSchema message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AvroSchema - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroSchema; - - /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. - * @param message AvroSchema - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AvroSchema to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AvroSchema - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an AvroRows. */ - interface IAvroRows { - - /** AvroRows serializedBinaryRows */ - serializedBinaryRows?: (Uint8Array|string|null); - - /** AvroRows rowCount */ - rowCount?: (number|Long|string|null); - } - - /** Represents an AvroRows. */ - class AvroRows implements IAvroRows { - - /** - * Constructs a new AvroRows. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); - - /** AvroRows serializedBinaryRows. */ - public serializedBinaryRows: (Uint8Array|string); - - /** AvroRows rowCount. */ - public rowCount: (number|Long|string); - - /** - * Creates a new AvroRows instance using the specified properties. - * @param [properties] Properties to set - * @returns AvroRows instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows): google.cloud.bigquery.storage.v1beta1.AvroRows; - - /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. - * @param message AvroRows message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an AvroRows message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroRows; - - /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroRows; - - /** - * Verifies an AvroRows message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns AvroRows - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroRows; - - /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. - * @param message AvroRows - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this AvroRows to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for AvroRows - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a TableReadOptions. */ - interface ITableReadOptions { - - /** TableReadOptions selectedFields */ - selectedFields?: (string[]|null); - - /** TableReadOptions rowRestriction */ - rowRestriction?: (string|null); - } - - /** Represents a TableReadOptions. */ - class TableReadOptions implements ITableReadOptions { - - /** - * Constructs a new TableReadOptions. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions); - - /** TableReadOptions selectedFields. */ - public selectedFields: string[]; - - /** TableReadOptions rowRestriction. */ - public rowRestriction: string; - - /** - * Creates a new TableReadOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns TableReadOptions instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions): google.cloud.bigquery.storage.v1beta1.TableReadOptions; - - /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. - * @param message TableReadOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReadOptions; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReadOptions; - - /** - * Verifies a TableReadOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableReadOptions - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReadOptions; - - /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. - * @param message TableReadOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableReadOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableReadOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Represents a BigQueryStorage */ - class BigQueryStorage extends $protobuf.rpc.Service { - - /** - * Constructs a new BigQueryStorage service. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - */ - constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); - - /** - * Creates new BigQueryStorage service using the specified rpc implementation. - * @param rpcImpl RPC implementation - * @param [requestDelimited=false] Whether requests are length-delimited - * @param [responseDelimited=false] Whether responses are length-delimited - * @returns RPC service. Useful where requests and/or responses are streamed. - */ - public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryStorage; - - /** - * Calls CreateReadSession. - * @param request CreateReadSessionRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadSession - */ - public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback): void; - - /** - * Calls CreateReadSession. - * @param request CreateReadSessionRequest message or plain object - * @returns Promise - */ - public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): Promise; - - /** - * Calls ReadRows. 
- * @param request ReadRowsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and ReadRowsResponse - */ - public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback): void; - - /** - * Calls ReadRows. - * @param request ReadRowsRequest message or plain object - * @returns Promise - */ - public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): Promise; - - /** - * Calls BatchCreateReadSessionStreams. - * @param request BatchCreateReadSessionStreamsRequest message or plain object - * @param callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse - */ - public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback): void; - - /** - * Calls BatchCreateReadSessionStreams. - * @param request BatchCreateReadSessionStreamsRequest message or plain object - * @returns Promise - */ - public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): Promise; - - /** - * Calls FinalizeStream. - * @param request FinalizeStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and Empty - */ - public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback): void; - - /** - * Calls FinalizeStream. - * @param request FinalizeStreamRequest message or plain object - * @returns Promise - */ - public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): Promise; - - /** - * Calls SplitReadStream. 
- * @param request SplitReadStreamRequest message or plain object - * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse - */ - public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback): void; - - /** - * Calls SplitReadStream. - * @param request SplitReadStreamRequest message or plain object - * @returns Promise - */ - public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): Promise; - } - - namespace BigQueryStorage { - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. - * @param error Error, if any - * @param [response] ReadSession - */ - type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. - * @param error Error, if any - * @param [response] ReadRowsResponse - */ - type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. - * @param error Error, if any - * @param [response] BatchCreateReadSessionStreamsResponse - */ - type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. 
- * @param error Error, if any - * @param [response] Empty - */ - type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. - * @param error Error, if any - * @param [response] SplitReadStreamResponse - */ - type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) => void; - } - - /** Properties of a Stream. */ - interface IStream { - - /** Stream name */ - name?: (string|null); - } - - /** Represents a Stream. */ - class Stream implements IStream { - - /** - * Constructs a new Stream. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStream); - - /** Stream name. */ - public name: string; - - /** - * Creates a new Stream instance using the specified properties. - * @param [properties] Properties to set - * @returns Stream instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStream): google.cloud.bigquery.storage.v1beta1.Stream; - - /** - * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. - * @param message Stream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
- * @param message Stream message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Stream message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Stream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Stream; - - /** - * Decodes a Stream message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Stream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Stream; - - /** - * Verifies a Stream message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Stream message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Stream - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Stream; - - /** - * Creates a plain object from a Stream message. Also converts values to other types if specified. 
- * @param message Stream - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.Stream, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Stream to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Stream - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a StreamPosition. */ - interface IStreamPosition { - - /** StreamPosition stream */ - stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** StreamPosition offset */ - offset?: (number|Long|string|null); - } - - /** Represents a StreamPosition. */ - class StreamPosition implements IStreamPosition { - - /** - * Constructs a new StreamPosition. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition); - - /** StreamPosition stream. */ - public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** StreamPosition offset. */ - public offset: (number|Long|string); - - /** - * Creates a new StreamPosition instance using the specified properties. - * @param [properties] Properties to set - * @returns StreamPosition instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
- * @param message StreamPosition message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. - * @param message StreamPosition message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StreamPosition message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Decodes a StreamPosition message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Verifies a StreamPosition message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StreamPosition message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns StreamPosition - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamPosition; - - /** - * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. - * @param message StreamPosition - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamPosition, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StreamPosition to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for StreamPosition - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ReadSession. */ - interface IReadSession { - - /** ReadSession name */ - name?: (string|null); - - /** ReadSession expireTime */ - expireTime?: (google.protobuf.ITimestamp|null); - - /** ReadSession avroSchema */ - avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); - - /** ReadSession arrowSchema */ - arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); - - /** ReadSession streams */ - streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); - - /** ReadSession tableReference */ - tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); - - /** ReadSession tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); - - /** ReadSession shardingStrategy */ - shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); - } - - /** Represents a ReadSession. 
*/ - class ReadSession implements IReadSession { - - /** - * Constructs a new ReadSession. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession); - - /** ReadSession name. */ - public name: string; - - /** ReadSession expireTime. */ - public expireTime?: (google.protobuf.ITimestamp|null); - - /** ReadSession avroSchema. */ - public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); - - /** ReadSession arrowSchema. */ - public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); - - /** ReadSession streams. */ - public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; - - /** ReadSession tableReference. */ - public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); - - /** ReadSession tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); - - /** ReadSession shardingStrategy. */ - public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); - - /** ReadSession schema. */ - public schema?: ("avroSchema"|"arrowSchema"); - - /** - * Creates a new ReadSession instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadSession instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession): google.cloud.bigquery.storage.v1beta1.ReadSession; - - /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. 
- * @param message ReadSession message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. - * @param message ReadSession message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadSession message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadSession; - - /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadSession; - - /** - * Verifies a ReadSession message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ReadSession - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadSession; - - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @param message ReadSession - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadSession to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadSession - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a CreateReadSessionRequest. */ - interface ICreateReadSessionRequest { - - /** CreateReadSessionRequest tableReference */ - tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); - - /** CreateReadSessionRequest parent */ - parent?: (string|null); - - /** CreateReadSessionRequest tableModifiers */ - tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); - - /** CreateReadSessionRequest requestedStreams */ - requestedStreams?: (number|null); - - /** CreateReadSessionRequest readOptions */ - readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); - - /** CreateReadSessionRequest format */ - format?: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat|null); - - /** CreateReadSessionRequest shardingStrategy */ - shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); - } - - /** Represents a CreateReadSessionRequest. 
*/ - class CreateReadSessionRequest implements ICreateReadSessionRequest { - - /** - * Constructs a new CreateReadSessionRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest); - - /** CreateReadSessionRequest tableReference. */ - public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); - - /** CreateReadSessionRequest parent. */ - public parent: string; - - /** CreateReadSessionRequest tableModifiers. */ - public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); - - /** CreateReadSessionRequest requestedStreams. */ - public requestedStreams: number; - - /** CreateReadSessionRequest readOptions. */ - public readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); - - /** CreateReadSessionRequest format. */ - public format: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat); - - /** CreateReadSessionRequest shardingStrategy. */ - public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); - - /** - * Creates a new CreateReadSessionRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns CreateReadSessionRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; - - /** - * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
- * @param message CreateReadSessionRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. - * @param message CreateReadSessionRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; - - /** - * Verifies a CreateReadSessionRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns CreateReadSessionRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; - - /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. - * @param message CreateReadSessionRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this CreateReadSessionRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for CreateReadSessionRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** DataFormat enum. */ - enum DataFormat { - DATA_FORMAT_UNSPECIFIED = 0, - AVRO = 1, - ARROW = 3 - } - - /** ShardingStrategy enum. */ - enum ShardingStrategy { - SHARDING_STRATEGY_UNSPECIFIED = 0, - LIQUID = 1, - BALANCED = 2 - } - - /** Properties of a ReadRowsRequest. */ - interface IReadRowsRequest { - - /** ReadRowsRequest readPosition */ - readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); - } - - /** Represents a ReadRowsRequest. */ - class ReadRowsRequest implements IReadRowsRequest { - - /** - * Constructs a new ReadRowsRequest. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest); - - /** ReadRowsRequest readPosition. */ - public readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); - - /** - * Creates a new ReadRowsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadRowsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; - - /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. - * @param message ReadRowsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; - - /** - * Verifies a ReadRowsRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReadRowsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; - - /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. - * @param message ReadRowsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadRowsRequest to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadRowsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a StreamStatus. */ - interface IStreamStatus { - - /** StreamStatus estimatedRowCount */ - estimatedRowCount?: (number|Long|string|null); - - /** StreamStatus fractionConsumed */ - fractionConsumed?: (number|null); - - /** StreamStatus progress */ - progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); - - /** StreamStatus isSplittable */ - isSplittable?: (boolean|null); - } - - /** Represents a StreamStatus. */ - class StreamStatus implements IStreamStatus { - - /** - * Constructs a new StreamStatus. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus); - - /** StreamStatus estimatedRowCount. */ - public estimatedRowCount: (number|Long|string); - - /** StreamStatus fractionConsumed. */ - public fractionConsumed: number; - - /** StreamStatus progress. */ - public progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); - - /** StreamStatus isSplittable. */ - public isSplittable: boolean; - - /** - * Creates a new StreamStatus instance using the specified properties. - * @param [properties] Properties to set - * @returns StreamStatus instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus): google.cloud.bigquery.storage.v1beta1.StreamStatus; - - /** - * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
- * @param message StreamStatus message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. - * @param message StreamStatus message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StreamStatus message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StreamStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamStatus; - - /** - * Decodes a StreamStatus message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns StreamStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamStatus; - - /** - * Verifies a StreamStatus message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns StreamStatus - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamStatus; - - /** - * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. - * @param message StreamStatus - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StreamStatus to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for StreamStatus - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a Progress. */ - interface IProgress { - - /** Progress atResponseStart */ - atResponseStart?: (number|null); - - /** Progress atResponseEnd */ - atResponseEnd?: (number|null); - } - - /** Represents a Progress. */ - class Progress implements IProgress { - - /** - * Constructs a new Progress. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IProgress); - - /** Progress atResponseStart. */ - public atResponseStart: number; - - /** Progress atResponseEnd. */ - public atResponseEnd: number; - - /** - * Creates a new Progress instance using the specified properties. - * @param [properties] Properties to set - * @returns Progress instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IProgress): google.cloud.bigquery.storage.v1beta1.Progress; - - /** - * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
- * @param message Progress message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. - * @param message Progress message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Progress message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Progress; - - /** - * Decodes a Progress message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Progress; - - /** - * Verifies a Progress message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns Progress - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Progress; - - /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. - * @param message Progress - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Progress to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Progress - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ThrottleStatus. */ - interface IThrottleStatus { - - /** ThrottleStatus throttlePercent */ - throttlePercent?: (number|null); - } - - /** Represents a ThrottleStatus. */ - class ThrottleStatus implements IThrottleStatus { - - /** - * Constructs a new ThrottleStatus. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus); - - /** ThrottleStatus throttlePercent. */ - public throttlePercent: number; - - /** - * Creates a new ThrottleStatus instance using the specified properties. - * @param [properties] Properties to set - * @returns ThrottleStatus instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; - - /** - * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. 
- * @param message ThrottleStatus message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. - * @param message ThrottleStatus message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ThrottleStatus message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ThrottleStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; - - /** - * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ThrottleStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; - - /** - * Verifies a ThrottleStatus message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ThrottleStatus message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns ThrottleStatus - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; - - /** - * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. - * @param message ThrottleStatus - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ThrottleStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ThrottleStatus to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ThrottleStatus - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ReadRowsResponse. */ - interface IReadRowsResponse { - - /** ReadRowsResponse avroRows */ - avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); - - /** ReadRowsResponse arrowRecordBatch */ - arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); - - /** ReadRowsResponse rowCount */ - rowCount?: (number|Long|string|null); - - /** ReadRowsResponse status */ - status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); - - /** ReadRowsResponse throttleStatus */ - throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); - } - - /** Represents a ReadRowsResponse. */ - class ReadRowsResponse implements IReadRowsResponse { - - /** - * Constructs a new ReadRowsResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse); - - /** ReadRowsResponse avroRows. 
*/ - public avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); - - /** ReadRowsResponse arrowRecordBatch. */ - public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); - - /** ReadRowsResponse rowCount. */ - public rowCount: (number|Long|string); - - /** ReadRowsResponse status. */ - public status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); - - /** ReadRowsResponse throttleStatus. */ - public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); - - /** ReadRowsResponse rows. */ - public rows?: ("avroRows"|"arrowRecordBatch"); - - /** - * Creates a new ReadRowsResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns ReadRowsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; - - /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. - * @param message ReadRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. - * @param message ReadRowsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; - - /** - * Verifies a ReadRowsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ReadRowsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; - - /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. - * @param message ReadRowsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReadRowsResponse to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReadRowsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BatchCreateReadSessionStreamsRequest. */ - interface IBatchCreateReadSessionStreamsRequest { - - /** BatchCreateReadSessionStreamsRequest session */ - session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); - - /** BatchCreateReadSessionStreamsRequest requestedStreams */ - requestedStreams?: (number|null); - } - - /** Represents a BatchCreateReadSessionStreamsRequest. */ - class BatchCreateReadSessionStreamsRequest implements IBatchCreateReadSessionStreamsRequest { - - /** - * Constructs a new BatchCreateReadSessionStreamsRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest); - - /** BatchCreateReadSessionStreamsRequest session. */ - public session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); - - /** BatchCreateReadSessionStreamsRequest requestedStreams. */ - public requestedStreams: number; - - /** - * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns BatchCreateReadSessionStreamsRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; - - /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
- * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BatchCreateReadSessionStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; - - /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns BatchCreateReadSessionStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; - - /** - * Verifies a BatchCreateReadSessionStreamsRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BatchCreateReadSessionStreamsRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; - - /** - * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. - * @param message BatchCreateReadSessionStreamsRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BatchCreateReadSessionStreamsRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BatchCreateReadSessionStreamsRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BatchCreateReadSessionStreamsResponse. */ - interface IBatchCreateReadSessionStreamsResponse { - - /** BatchCreateReadSessionStreamsResponse streams */ - streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); - } - - /** Represents a BatchCreateReadSessionStreamsResponse. */ - class BatchCreateReadSessionStreamsResponse implements IBatchCreateReadSessionStreamsResponse { - - /** - * Constructs a new BatchCreateReadSessionStreamsResponse. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse); - - /** BatchCreateReadSessionStreamsResponse streams. */ - public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; - - /** - * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns BatchCreateReadSessionStreamsResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; - - /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. - * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BatchCreateReadSessionStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; - - /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns BatchCreateReadSessionStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; - - /** - * Verifies a BatchCreateReadSessionStreamsResponse message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BatchCreateReadSessionStreamsResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; - - /** - * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
- * @param message BatchCreateReadSessionStreamsResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BatchCreateReadSessionStreamsResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BatchCreateReadSessionStreamsResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FinalizeStreamRequest. */ - interface IFinalizeStreamRequest { - - /** FinalizeStreamRequest stream */ - stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - } - - /** Represents a FinalizeStreamRequest. */ - class FinalizeStreamRequest implements IFinalizeStreamRequest { - - /** - * Constructs a new FinalizeStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest); - - /** FinalizeStreamRequest stream. */ - public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** - * Creates a new FinalizeStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns FinalizeStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; - - /** - * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
- * @param message FinalizeStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. - * @param message FinalizeStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FinalizeStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; - - /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FinalizeStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; - - /** - * Verifies a FinalizeStreamRequest message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FinalizeStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; - - /** - * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. - * @param message FinalizeStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FinalizeStreamRequest to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FinalizeStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a SplitReadStreamRequest. */ - interface ISplitReadStreamRequest { - - /** SplitReadStreamRequest originalStream */ - originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** SplitReadStreamRequest fraction */ - fraction?: (number|null); - } - - /** Represents a SplitReadStreamRequest. */ - class SplitReadStreamRequest implements ISplitReadStreamRequest { - - /** - * Constructs a new SplitReadStreamRequest. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest); - - /** SplitReadStreamRequest originalStream. 
*/ - public originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** SplitReadStreamRequest fraction. */ - public fraction: number; - - /** - * Creates a new SplitReadStreamRequest instance using the specified properties. - * @param [properties] Properties to set - * @returns SplitReadStreamRequest instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; - - /** - * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @param message SplitReadStreamRequest message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; - - /** - * Verifies a SplitReadStreamRequest message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SplitReadStreamRequest - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; - - /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. - * @param message SplitReadStreamRequest - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SplitReadStreamRequest to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SplitReadStreamRequest - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a SplitReadStreamResponse. */ - interface ISplitReadStreamResponse { - - /** SplitReadStreamResponse primaryStream */ - primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** SplitReadStreamResponse remainderStream */ - remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - } - - /** Represents a SplitReadStreamResponse. */ - class SplitReadStreamResponse implements ISplitReadStreamResponse { - - /** - * Constructs a new SplitReadStreamResponse. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse); - - /** SplitReadStreamResponse primaryStream. */ - public primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** SplitReadStreamResponse remainderStream. */ - public remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); - - /** - * Creates a new SplitReadStreamResponse instance using the specified properties. - * @param [properties] Properties to set - * @returns SplitReadStreamResponse instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; - - /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
- * @param message SplitReadStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. - * @param message SplitReadStreamResponse message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; - - /** - * Verifies a SplitReadStreamResponse message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SplitReadStreamResponse - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; - - /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. - * @param message SplitReadStreamResponse - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SplitReadStreamResponse to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SplitReadStreamResponse - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a TableReference. */ - interface ITableReference { - - /** TableReference projectId */ - projectId?: (string|null); - - /** TableReference datasetId */ - datasetId?: (string|null); - - /** TableReference tableId */ - tableId?: (string|null); - } - - /** Represents a TableReference. */ - class TableReference implements ITableReference { - - /** - * Constructs a new TableReference. - * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference); - - /** TableReference projectId. */ - public projectId: string; - - /** TableReference datasetId. */ - public datasetId: string; - - /** TableReference tableId. 
*/ - public tableId: string; - - /** - * Creates a new TableReference instance using the specified properties. - * @param [properties] Properties to set - * @returns TableReference instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference): google.cloud.bigquery.storage.v1beta1.TableReference; - - /** - * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. - * @param message TableReference message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. - * @param message TableReference message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableReference message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReference; - - /** - * Decodes a TableReference message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns TableReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReference; - - /** - * Verifies a TableReference message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableReference message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableReference - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReference; - - /** - * Creates a plain object from a TableReference message. Also converts values to other types if specified. - * @param message TableReference - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableReference to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableReference - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a TableModifiers. */ - interface ITableModifiers { - - /** TableModifiers snapshotTime */ - snapshotTime?: (google.protobuf.ITimestamp|null); - } - - /** Represents a TableModifiers. */ - class TableModifiers implements ITableModifiers { - - /** - * Constructs a new TableModifiers. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers); - - /** TableModifiers snapshotTime. */ - public snapshotTime?: (google.protobuf.ITimestamp|null); - - /** - * Creates a new TableModifiers instance using the specified properties. - * @param [properties] Properties to set - * @returns TableModifiers instance - */ - public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers): google.cloud.bigquery.storage.v1beta1.TableModifiers; - - /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @param message TableModifiers message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a TableModifiers message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableModifiers; - - /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableModifiers; - - /** - * Verifies a TableModifiers message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns TableModifiers - */ - public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableModifiers; - - /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. - * @param message TableModifiers - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this TableModifiers to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for TableModifiers - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - } - } - } - - /** Namespace protobuf. */ - namespace protobuf { - - /** Properties of a FileDescriptorSet. */ - interface IFileDescriptorSet { - - /** FileDescriptorSet file */ - file?: (google.protobuf.IFileDescriptorProto[]|null); - } - - /** Represents a FileDescriptorSet. */ - class FileDescriptorSet implements IFileDescriptorSet { - - /** - * Constructs a new FileDescriptorSet. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFileDescriptorSet); - - /** FileDescriptorSet file. */ - public file: google.protobuf.IFileDescriptorProto[]; - - /** - * Creates a new FileDescriptorSet instance using the specified properties. - * @param [properties] Properties to set - * @returns FileDescriptorSet instance - */ - public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; - - /** - * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. - * @param message FileDescriptorSet message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. 
- * @param message FileDescriptorSet message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FileDescriptorSet message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FileDescriptorSet - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; - - /** - * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FileDescriptorSet - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; - - /** - * Verifies a FileDescriptorSet message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FileDescriptorSet - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; - - /** - * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. 
- * @param message FileDescriptorSet - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FileDescriptorSet to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FileDescriptorSet - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FileDescriptorProto. */ - interface IFileDescriptorProto { - - /** FileDescriptorProto name */ - name?: (string|null); - - /** FileDescriptorProto package */ - "package"?: (string|null); - - /** FileDescriptorProto dependency */ - dependency?: (string[]|null); - - /** FileDescriptorProto publicDependency */ - publicDependency?: (number[]|null); - - /** FileDescriptorProto weakDependency */ - weakDependency?: (number[]|null); - - /** FileDescriptorProto messageType */ - messageType?: (google.protobuf.IDescriptorProto[]|null); - - /** FileDescriptorProto enumType */ - enumType?: (google.protobuf.IEnumDescriptorProto[]|null); - - /** FileDescriptorProto service */ - service?: (google.protobuf.IServiceDescriptorProto[]|null); - - /** FileDescriptorProto extension */ - extension?: (google.protobuf.IFieldDescriptorProto[]|null); - - /** FileDescriptorProto options */ - options?: (google.protobuf.IFileOptions|null); - - /** FileDescriptorProto sourceCodeInfo */ - sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); - - /** FileDescriptorProto syntax */ - syntax?: (string|null); - - /** FileDescriptorProto edition */ - edition?: (string|null); - } - - /** Represents a FileDescriptorProto. */ - class FileDescriptorProto implements IFileDescriptorProto { - - /** - * Constructs a new FileDescriptorProto. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFileDescriptorProto); - - /** FileDescriptorProto name. */ - public name: string; - - /** FileDescriptorProto package. */ - public package: string; - - /** FileDescriptorProto dependency. */ - public dependency: string[]; - - /** FileDescriptorProto publicDependency. */ - public publicDependency: number[]; - - /** FileDescriptorProto weakDependency. */ - public weakDependency: number[]; - - /** FileDescriptorProto messageType. */ - public messageType: google.protobuf.IDescriptorProto[]; - - /** FileDescriptorProto enumType. */ - public enumType: google.protobuf.IEnumDescriptorProto[]; - - /** FileDescriptorProto service. */ - public service: google.protobuf.IServiceDescriptorProto[]; - - /** FileDescriptorProto extension. */ - public extension: google.protobuf.IFieldDescriptorProto[]; - - /** FileDescriptorProto options. */ - public options?: (google.protobuf.IFileOptions|null); - - /** FileDescriptorProto sourceCodeInfo. */ - public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); - - /** FileDescriptorProto syntax. */ - public syntax: string; - - /** FileDescriptorProto edition. */ - public edition: string; - - /** - * Creates a new FileDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns FileDescriptorProto instance - */ - public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; - - /** - * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @param message FileDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FileDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @param message FileDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FileDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FileDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; - - /** - * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FileDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; - - /** - * Verifies a FileDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FileDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; - - /** - * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. 
- * @param message FileDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FileDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FileDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a DescriptorProto. */ - interface IDescriptorProto { - - /** DescriptorProto name */ - name?: (string|null); - - /** DescriptorProto field */ - field?: (google.protobuf.IFieldDescriptorProto[]|null); - - /** DescriptorProto extension */ - extension?: (google.protobuf.IFieldDescriptorProto[]|null); - - /** DescriptorProto nestedType */ - nestedType?: (google.protobuf.IDescriptorProto[]|null); - - /** DescriptorProto enumType */ - enumType?: (google.protobuf.IEnumDescriptorProto[]|null); - - /** DescriptorProto extensionRange */ - extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); - - /** DescriptorProto oneofDecl */ - oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); - - /** DescriptorProto options */ - options?: (google.protobuf.IMessageOptions|null); - - /** DescriptorProto reservedRange */ - reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); - - /** DescriptorProto reservedName */ - reservedName?: (string[]|null); - } - - /** Represents a DescriptorProto. */ - class DescriptorProto implements IDescriptorProto { - - /** - * Constructs a new DescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IDescriptorProto); - - /** DescriptorProto name. */ - public name: string; - - /** DescriptorProto field. 
*/ - public field: google.protobuf.IFieldDescriptorProto[]; - - /** DescriptorProto extension. */ - public extension: google.protobuf.IFieldDescriptorProto[]; - - /** DescriptorProto nestedType. */ - public nestedType: google.protobuf.IDescriptorProto[]; - - /** DescriptorProto enumType. */ - public enumType: google.protobuf.IEnumDescriptorProto[]; - - /** DescriptorProto extensionRange. */ - public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; - - /** DescriptorProto oneofDecl. */ - public oneofDecl: google.protobuf.IOneofDescriptorProto[]; - - /** DescriptorProto options. */ - public options?: (google.protobuf.IMessageOptions|null); - - /** DescriptorProto reservedRange. */ - public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; - - /** DescriptorProto reservedName. */ - public reservedName: string[]; - - /** - * Creates a new DescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns DescriptorProto instance - */ - public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; - - /** - * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. - * @param message DescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
- * @param message DescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a DescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns DescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; - - /** - * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns DescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; - - /** - * Verifies a DescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns DescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; - - /** - * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. 
- * @param message DescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this DescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for DescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace DescriptorProto { - - /** Properties of an ExtensionRange. */ - interface IExtensionRange { - - /** ExtensionRange start */ - start?: (number|null); - - /** ExtensionRange end */ - end?: (number|null); - - /** ExtensionRange options */ - options?: (google.protobuf.IExtensionRangeOptions|null); - } - - /** Represents an ExtensionRange. */ - class ExtensionRange implements IExtensionRange { - - /** - * Constructs a new ExtensionRange. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); - - /** ExtensionRange start. */ - public start: number; - - /** ExtensionRange end. */ - public end: number; - - /** ExtensionRange options. */ - public options?: (google.protobuf.IExtensionRangeOptions|null); - - /** - * Creates a new ExtensionRange instance using the specified properties. - * @param [properties] Properties to set - * @returns ExtensionRange instance - */ - public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
- * @param message ExtensionRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. - * @param message ExtensionRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Verifies an ExtensionRange message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ExtensionRange - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; - - /** - * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. - * @param message ExtensionRange - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ExtensionRange to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ExtensionRange - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ReservedRange. */ - interface IReservedRange { - - /** ReservedRange start */ - start?: (number|null); - - /** ReservedRange end */ - end?: (number|null); - } - - /** Represents a ReservedRange. */ - class ReservedRange implements IReservedRange { - - /** - * Constructs a new ReservedRange. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); - - /** ReservedRange start. */ - public start: number; - - /** ReservedRange end. */ - public end: number; - - /** - * Creates a new ReservedRange instance using the specified properties. - * @param [properties] Properties to set - * @returns ReservedRange instance - */ - public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; - - /** - * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
- * @param message ReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. - * @param message ReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ReservedRange message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; - - /** - * Decodes a ReservedRange message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; - - /** - * Verifies a ReservedRange message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns ReservedRange - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; - - /** - * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. - * @param message ReservedRange - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ReservedRange to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ReservedRange - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of an ExtensionRangeOptions. */ - interface IExtensionRangeOptions { - - /** ExtensionRangeOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - } - - /** Represents an ExtensionRangeOptions. */ - class ExtensionRangeOptions implements IExtensionRangeOptions { - - /** - * Constructs a new ExtensionRangeOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IExtensionRangeOptions); - - /** ExtensionRangeOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new ExtensionRangeOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns ExtensionRangeOptions instance - */ - public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; - - /** - * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. 
- * @param message ExtensionRangeOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. - * @param message ExtensionRangeOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ExtensionRangeOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; - - /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ExtensionRangeOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; - - /** - * Verifies an ExtensionRangeOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ExtensionRangeOptions message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns ExtensionRangeOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; - - /** - * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. - * @param message ExtensionRangeOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ExtensionRangeOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ExtensionRangeOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FieldDescriptorProto. 
*/ - interface IFieldDescriptorProto { - - /** FieldDescriptorProto name */ - name?: (string|null); - - /** FieldDescriptorProto number */ - number?: (number|null); - - /** FieldDescriptorProto label */ - label?: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label|null); - - /** FieldDescriptorProto type */ - type?: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type|null); - - /** FieldDescriptorProto typeName */ - typeName?: (string|null); - - /** FieldDescriptorProto extendee */ - extendee?: (string|null); - - /** FieldDescriptorProto defaultValue */ - defaultValue?: (string|null); - - /** FieldDescriptorProto oneofIndex */ - oneofIndex?: (number|null); - - /** FieldDescriptorProto jsonName */ - jsonName?: (string|null); - - /** FieldDescriptorProto options */ - options?: (google.protobuf.IFieldOptions|null); - - /** FieldDescriptorProto proto3Optional */ - proto3Optional?: (boolean|null); - } - - /** Represents a FieldDescriptorProto. */ - class FieldDescriptorProto implements IFieldDescriptorProto { - - /** - * Constructs a new FieldDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFieldDescriptorProto); - - /** FieldDescriptorProto name. */ - public name: string; - - /** FieldDescriptorProto number. */ - public number: number; - - /** FieldDescriptorProto label. */ - public label: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label); - - /** FieldDescriptorProto type. */ - public type: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type); - - /** FieldDescriptorProto typeName. */ - public typeName: string; - - /** FieldDescriptorProto extendee. */ - public extendee: string; - - /** FieldDescriptorProto defaultValue. */ - public defaultValue: string; - - /** FieldDescriptorProto oneofIndex. 
*/ - public oneofIndex: number; - - /** FieldDescriptorProto jsonName. */ - public jsonName: string; - - /** FieldDescriptorProto options. */ - public options?: (google.protobuf.IFieldOptions|null); - - /** FieldDescriptorProto proto3Optional. */ - public proto3Optional: boolean; - - /** - * Creates a new FieldDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns FieldDescriptorProto instance - */ - public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; - - /** - * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. - * @param message FieldDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. - * @param message FieldDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FieldDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; - - /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FieldDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; - - /** - * Verifies a FieldDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FieldDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; - - /** - * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. - * @param message FieldDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FieldDescriptorProto to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FieldDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace FieldDescriptorProto { - - /** Type enum. */ - enum Type { - TYPE_DOUBLE = 1, - TYPE_FLOAT = 2, - TYPE_INT64 = 3, - TYPE_UINT64 = 4, - TYPE_INT32 = 5, - TYPE_FIXED64 = 6, - TYPE_FIXED32 = 7, - TYPE_BOOL = 8, - TYPE_STRING = 9, - TYPE_GROUP = 10, - TYPE_MESSAGE = 11, - TYPE_BYTES = 12, - TYPE_UINT32 = 13, - TYPE_ENUM = 14, - TYPE_SFIXED32 = 15, - TYPE_SFIXED64 = 16, - TYPE_SINT32 = 17, - TYPE_SINT64 = 18 - } - - /** Label enum. */ - enum Label { - LABEL_OPTIONAL = 1, - LABEL_REQUIRED = 2, - LABEL_REPEATED = 3 - } - } - - /** Properties of an OneofDescriptorProto. */ - interface IOneofDescriptorProto { - - /** OneofDescriptorProto name */ - name?: (string|null); - - /** OneofDescriptorProto options */ - options?: (google.protobuf.IOneofOptions|null); - } - - /** Represents an OneofDescriptorProto. */ - class OneofDescriptorProto implements IOneofDescriptorProto { - - /** - * Constructs a new OneofDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IOneofDescriptorProto); - - /** OneofDescriptorProto name. */ - public name: string; - - /** OneofDescriptorProto options. */ - public options?: (google.protobuf.IOneofOptions|null); - - /** - * Creates a new OneofDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns OneofDescriptorProto instance - */ - public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; - - /** - * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
- * @param message OneofDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. - * @param message OneofDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; - - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; - - /** - * Verifies an OneofDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns OneofDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; - - /** - * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. - * @param message OneofDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this OneofDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for OneofDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an EnumDescriptorProto. */ - interface IEnumDescriptorProto { - - /** EnumDescriptorProto name */ - name?: (string|null); - - /** EnumDescriptorProto value */ - value?: (google.protobuf.IEnumValueDescriptorProto[]|null); - - /** EnumDescriptorProto options */ - options?: (google.protobuf.IEnumOptions|null); - - /** EnumDescriptorProto reservedRange */ - reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); - - /** EnumDescriptorProto reservedName */ - reservedName?: (string[]|null); - } - - /** Represents an EnumDescriptorProto. */ - class EnumDescriptorProto implements IEnumDescriptorProto { - - /** - * Constructs a new EnumDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IEnumDescriptorProto); - - /** EnumDescriptorProto name. */ - public name: string; - - /** EnumDescriptorProto value. */ - public value: google.protobuf.IEnumValueDescriptorProto[]; - - /** EnumDescriptorProto options. 
*/ - public options?: (google.protobuf.IEnumOptions|null); - - /** EnumDescriptorProto reservedRange. */ - public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; - - /** EnumDescriptorProto reservedName. */ - public reservedName: string[]; - - /** - * Creates a new EnumDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns EnumDescriptorProto instance - */ - public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; - - /** - * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. - * @param message EnumDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. - * @param message EnumDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns EnumDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; - - /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns EnumDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; - - /** - * Verifies an EnumDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns EnumDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; - - /** - * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. - * @param message EnumDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this EnumDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for EnumDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace EnumDescriptorProto { - - /** Properties of an EnumReservedRange. */ - interface IEnumReservedRange { - - /** EnumReservedRange start */ - start?: (number|null); - - /** EnumReservedRange end */ - end?: (number|null); - } - - /** Represents an EnumReservedRange. */ - class EnumReservedRange implements IEnumReservedRange { - - /** - * Constructs a new EnumReservedRange. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); - - /** EnumReservedRange start. */ - public start: number; - - /** EnumReservedRange end. */ - public end: number; - - /** - * Creates a new EnumReservedRange instance using the specified properties. - * @param [properties] Properties to set - * @returns EnumReservedRange instance - */ - public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; - - /** - * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. - * @param message EnumReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. - * @param message EnumReservedRange message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an EnumReservedRange message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns EnumReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; - - /** - * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns EnumReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; - - /** - * Verifies an EnumReservedRange message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns EnumReservedRange - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; - - /** - * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. - * @param message EnumReservedRange - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this EnumReservedRange to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for EnumReservedRange - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of an EnumValueDescriptorProto. */ - interface IEnumValueDescriptorProto { - - /** EnumValueDescriptorProto name */ - name?: (string|null); - - /** EnumValueDescriptorProto number */ - number?: (number|null); - - /** EnumValueDescriptorProto options */ - options?: (google.protobuf.IEnumValueOptions|null); - } - - /** Represents an EnumValueDescriptorProto. */ - class EnumValueDescriptorProto implements IEnumValueDescriptorProto { - - /** - * Constructs a new EnumValueDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IEnumValueDescriptorProto); - - /** EnumValueDescriptorProto name. */ - public name: string; - - /** EnumValueDescriptorProto number. */ - public number: number; - - /** EnumValueDescriptorProto options. */ - public options?: (google.protobuf.IEnumValueOptions|null); - - /** - * Creates a new EnumValueDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns EnumValueDescriptorProto instance - */ - public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; - - /** - * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
- * @param message EnumValueDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. - * @param message EnumValueDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; - - /** - * Verifies an EnumValueDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an EnumValueDescriptorProto message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns EnumValueDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; - - /** - * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. - * @param message EnumValueDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this EnumValueDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for EnumValueDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ServiceDescriptorProto. */ - interface IServiceDescriptorProto { - - /** ServiceDescriptorProto name */ - name?: (string|null); - - /** ServiceDescriptorProto method */ - method?: (google.protobuf.IMethodDescriptorProto[]|null); - - /** ServiceDescriptorProto options */ - options?: (google.protobuf.IServiceOptions|null); - } - - /** Represents a ServiceDescriptorProto. */ - class ServiceDescriptorProto implements IServiceDescriptorProto { - - /** - * Constructs a new ServiceDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IServiceDescriptorProto); - - /** ServiceDescriptorProto name. */ - public name: string; - - /** ServiceDescriptorProto method. */ - public method: google.protobuf.IMethodDescriptorProto[]; - - /** ServiceDescriptorProto options. */ - public options?: (google.protobuf.IServiceOptions|null); - - /** - * Creates a new ServiceDescriptorProto instance using the specified properties. 
- * @param [properties] Properties to set - * @returns ServiceDescriptorProto instance - */ - public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; - - /** - * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @param message ServiceDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @param message ServiceDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ServiceDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; - - /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns ServiceDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; - - /** - * Verifies a ServiceDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ServiceDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; - - /** - * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. - * @param message ServiceDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ServiceDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ServiceDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a MethodDescriptorProto. 
*/ - interface IMethodDescriptorProto { - - /** MethodDescriptorProto name */ - name?: (string|null); - - /** MethodDescriptorProto inputType */ - inputType?: (string|null); - - /** MethodDescriptorProto outputType */ - outputType?: (string|null); - - /** MethodDescriptorProto options */ - options?: (google.protobuf.IMethodOptions|null); - - /** MethodDescriptorProto clientStreaming */ - clientStreaming?: (boolean|null); - - /** MethodDescriptorProto serverStreaming */ - serverStreaming?: (boolean|null); - } - - /** Represents a MethodDescriptorProto. */ - class MethodDescriptorProto implements IMethodDescriptorProto { - - /** - * Constructs a new MethodDescriptorProto. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IMethodDescriptorProto); - - /** MethodDescriptorProto name. */ - public name: string; - - /** MethodDescriptorProto inputType. */ - public inputType: string; - - /** MethodDescriptorProto outputType. */ - public outputType: string; - - /** MethodDescriptorProto options. */ - public options?: (google.protobuf.IMethodOptions|null); - - /** MethodDescriptorProto clientStreaming. */ - public clientStreaming: boolean; - - /** MethodDescriptorProto serverStreaming. */ - public serverStreaming: boolean; - - /** - * Creates a new MethodDescriptorProto instance using the specified properties. - * @param [properties] Properties to set - * @returns MethodDescriptorProto instance - */ - public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; - - /** - * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. 
- * @param message MethodDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @param message MethodDescriptorProto message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; - - /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; - - /** - * Verifies a MethodDescriptorProto message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a MethodDescriptorProto message from a plain object. 
Also converts values to their respective internal types. - * @param object Plain object - * @returns MethodDescriptorProto - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; - - /** - * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. - * @param message MethodDescriptorProto - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this MethodDescriptorProto to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for MethodDescriptorProto - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FileOptions. 
*/ - interface IFileOptions { - - /** FileOptions javaPackage */ - javaPackage?: (string|null); - - /** FileOptions javaOuterClassname */ - javaOuterClassname?: (string|null); - - /** FileOptions javaMultipleFiles */ - javaMultipleFiles?: (boolean|null); - - /** FileOptions javaGenerateEqualsAndHash */ - javaGenerateEqualsAndHash?: (boolean|null); - - /** FileOptions javaStringCheckUtf8 */ - javaStringCheckUtf8?: (boolean|null); - - /** FileOptions optimizeFor */ - optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode|null); - - /** FileOptions goPackage */ - goPackage?: (string|null); - - /** FileOptions ccGenericServices */ - ccGenericServices?: (boolean|null); - - /** FileOptions javaGenericServices */ - javaGenericServices?: (boolean|null); - - /** FileOptions pyGenericServices */ - pyGenericServices?: (boolean|null); - - /** FileOptions phpGenericServices */ - phpGenericServices?: (boolean|null); - - /** FileOptions deprecated */ - deprecated?: (boolean|null); - - /** FileOptions ccEnableArenas */ - ccEnableArenas?: (boolean|null); - - /** FileOptions objcClassPrefix */ - objcClassPrefix?: (string|null); - - /** FileOptions csharpNamespace */ - csharpNamespace?: (string|null); - - /** FileOptions swiftPrefix */ - swiftPrefix?: (string|null); - - /** FileOptions phpClassPrefix */ - phpClassPrefix?: (string|null); - - /** FileOptions phpNamespace */ - phpNamespace?: (string|null); - - /** FileOptions phpMetadataNamespace */ - phpMetadataNamespace?: (string|null); - - /** FileOptions rubyPackage */ - rubyPackage?: (string|null); - - /** FileOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** FileOptions .google.api.resourceDefinition */ - ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); - } - - /** Represents a FileOptions. */ - class FileOptions implements IFileOptions { - - /** - * Constructs a new FileOptions. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFileOptions); - - /** FileOptions javaPackage. */ - public javaPackage: string; - - /** FileOptions javaOuterClassname. */ - public javaOuterClassname: string; - - /** FileOptions javaMultipleFiles. */ - public javaMultipleFiles: boolean; - - /** FileOptions javaGenerateEqualsAndHash. */ - public javaGenerateEqualsAndHash: boolean; - - /** FileOptions javaStringCheckUtf8. */ - public javaStringCheckUtf8: boolean; - - /** FileOptions optimizeFor. */ - public optimizeFor: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode); - - /** FileOptions goPackage. */ - public goPackage: string; - - /** FileOptions ccGenericServices. */ - public ccGenericServices: boolean; - - /** FileOptions javaGenericServices. */ - public javaGenericServices: boolean; - - /** FileOptions pyGenericServices. */ - public pyGenericServices: boolean; - - /** FileOptions phpGenericServices. */ - public phpGenericServices: boolean; - - /** FileOptions deprecated. */ - public deprecated: boolean; - - /** FileOptions ccEnableArenas. */ - public ccEnableArenas: boolean; - - /** FileOptions objcClassPrefix. */ - public objcClassPrefix: string; - - /** FileOptions csharpNamespace. */ - public csharpNamespace: string; - - /** FileOptions swiftPrefix. */ - public swiftPrefix: string; - - /** FileOptions phpClassPrefix. */ - public phpClassPrefix: string; - - /** FileOptions phpNamespace. */ - public phpNamespace: string; - - /** FileOptions phpMetadataNamespace. */ - public phpMetadataNamespace: string; - - /** FileOptions rubyPackage. */ - public rubyPackage: string; - - /** FileOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new FileOptions instance using the specified properties. 
- * @param [properties] Properties to set - * @returns FileOptions instance - */ - public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; - - /** - * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @param message FileOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @param message FileOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FileOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FileOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; - - /** - * Decodes a FileOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FileOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; - - /** - * Verifies a FileOptions message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FileOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; - - /** - * Creates a plain object from a FileOptions message. Also converts values to other types if specified. - * @param message FileOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FileOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FileOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace FileOptions { - - /** OptimizeMode enum. */ - enum OptimizeMode { - SPEED = 1, - CODE_SIZE = 2, - LITE_RUNTIME = 3 - } - } - - /** Properties of a MessageOptions. */ - interface IMessageOptions { - - /** MessageOptions messageSetWireFormat */ - messageSetWireFormat?: (boolean|null); - - /** MessageOptions noStandardDescriptorAccessor */ - noStandardDescriptorAccessor?: (boolean|null); - - /** MessageOptions deprecated */ - deprecated?: (boolean|null); - - /** MessageOptions mapEntry */ - mapEntry?: (boolean|null); - - /** MessageOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** MessageOptions .google.api.resource */ - ".google.api.resource"?: (google.api.IResourceDescriptor|null); - } - - /** Represents a MessageOptions. 
*/ - class MessageOptions implements IMessageOptions { - - /** - * Constructs a new MessageOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IMessageOptions); - - /** MessageOptions messageSetWireFormat. */ - public messageSetWireFormat: boolean; - - /** MessageOptions noStandardDescriptorAccessor. */ - public noStandardDescriptorAccessor: boolean; - - /** MessageOptions deprecated. */ - public deprecated: boolean; - - /** MessageOptions mapEntry. */ - public mapEntry: boolean; - - /** MessageOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new MessageOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns MessageOptions instance - */ - public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; - - /** - * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. - * @param message MessageOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. - * @param message MessageOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a MessageOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns MessageOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; - - /** - * Decodes a MessageOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns MessageOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; - - /** - * Verifies a MessageOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns MessageOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; - - /** - * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. - * @param message MessageOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this MessageOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for MessageOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FieldOptions. */ - interface IFieldOptions { - - /** FieldOptions ctype */ - ctype?: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType|null); - - /** FieldOptions packed */ - packed?: (boolean|null); - - /** FieldOptions jstype */ - jstype?: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType|null); - - /** FieldOptions lazy */ - lazy?: (boolean|null); - - /** FieldOptions unverifiedLazy */ - unverifiedLazy?: (boolean|null); - - /** FieldOptions deprecated */ - deprecated?: (boolean|null); - - /** FieldOptions weak */ - weak?: (boolean|null); - - /** FieldOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** FieldOptions .google.cloud.bigquery.storage.v1.columnName */ - ".google.cloud.bigquery.storage.v1.columnName"?: (string|null); - - /** FieldOptions .google.api.fieldBehavior */ - ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); - - /** FieldOptions .google.api.resourceReference */ - ".google.api.resourceReference"?: (google.api.IResourceReference|null); - } - - /** Represents a FieldOptions. */ - class FieldOptions implements IFieldOptions { - - /** - * Constructs a new FieldOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFieldOptions); - - /** FieldOptions ctype. */ - public ctype: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType); - - /** FieldOptions packed. */ - public packed: boolean; - - /** FieldOptions jstype. 
*/ - public jstype: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType); - - /** FieldOptions lazy. */ - public lazy: boolean; - - /** FieldOptions unverifiedLazy. */ - public unverifiedLazy: boolean; - - /** FieldOptions deprecated. */ - public deprecated: boolean; - - /** FieldOptions weak. */ - public weak: boolean; - - /** FieldOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new FieldOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns FieldOptions instance - */ - public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; - - /** - * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. - * @param message FieldOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. - * @param message FieldOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FieldOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FieldOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; - - /** - * Decodes a FieldOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FieldOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; - - /** - * Verifies a FieldOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FieldOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; - - /** - * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. - * @param message FieldOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FieldOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FieldOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace FieldOptions { - - /** CType enum. */ - enum CType { - STRING = 0, - CORD = 1, - STRING_PIECE = 2 - } - - /** JSType enum. */ - enum JSType { - JS_NORMAL = 0, - JS_STRING = 1, - JS_NUMBER = 2 - } - } - - /** Properties of an OneofOptions. */ - interface IOneofOptions { - - /** OneofOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - } - - /** Represents an OneofOptions. */ - class OneofOptions implements IOneofOptions { - - /** - * Constructs a new OneofOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IOneofOptions); - - /** OneofOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new OneofOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns OneofOptions instance - */ - public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; - - /** - * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. - * @param message OneofOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
- * @param message OneofOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an OneofOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns OneofOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; - - /** - * Decodes an OneofOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns OneofOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; - - /** - * Verifies an OneofOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns OneofOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; - - /** - * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
- * @param message OneofOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this OneofOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for OneofOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an EnumOptions. */ - interface IEnumOptions { - - /** EnumOptions allowAlias */ - allowAlias?: (boolean|null); - - /** EnumOptions deprecated */ - deprecated?: (boolean|null); - - /** EnumOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - } - - /** Represents an EnumOptions. */ - class EnumOptions implements IEnumOptions { - - /** - * Constructs a new EnumOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IEnumOptions); - - /** EnumOptions allowAlias. */ - public allowAlias: boolean; - - /** EnumOptions deprecated. */ - public deprecated: boolean; - - /** EnumOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new EnumOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns EnumOptions instance - */ - public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; - - /** - * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
- * @param message EnumOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. - * @param message EnumOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an EnumOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns EnumOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; - - /** - * Decodes an EnumOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns EnumOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; - - /** - * Verifies an EnumOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns EnumOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; - - /** - * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. - * @param message EnumOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this EnumOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for EnumOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an EnumValueOptions. */ - interface IEnumValueOptions { - - /** EnumValueOptions deprecated */ - deprecated?: (boolean|null); - - /** EnumValueOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - } - - /** Represents an EnumValueOptions. */ - class EnumValueOptions implements IEnumValueOptions { - - /** - * Constructs a new EnumValueOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IEnumValueOptions); - - /** EnumValueOptions deprecated. */ - public deprecated: boolean; - - /** EnumValueOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new EnumValueOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns EnumValueOptions instance - */ - public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; - - /** - * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
- * @param message EnumValueOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. - * @param message EnumValueOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an EnumValueOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns EnumValueOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; - - /** - * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns EnumValueOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; - - /** - * Verifies an EnumValueOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns EnumValueOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; - - /** - * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. - * @param message EnumValueOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this EnumValueOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for EnumValueOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a ServiceOptions. */ - interface IServiceOptions { - - /** ServiceOptions deprecated */ - deprecated?: (boolean|null); - - /** ServiceOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** ServiceOptions .google.api.defaultHost */ - ".google.api.defaultHost"?: (string|null); - - /** ServiceOptions .google.api.oauthScopes */ - ".google.api.oauthScopes"?: (string|null); - } - - /** Represents a ServiceOptions. */ - class ServiceOptions implements IServiceOptions { - - /** - * Constructs a new ServiceOptions. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IServiceOptions); - - /** ServiceOptions deprecated. */ - public deprecated: boolean; - - /** ServiceOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new ServiceOptions instance using the specified properties. 
- * @param [properties] Properties to set - * @returns ServiceOptions instance - */ - public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; - - /** - * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. - * @param message ServiceOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. - * @param message ServiceOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ServiceOptions message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ServiceOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; - - /** - * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ServiceOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; - - /** - * Verifies a ServiceOptions message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ServiceOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; - - /** - * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. - * @param message ServiceOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ServiceOptions to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ServiceOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a MethodOptions. */ - interface IMethodOptions { - - /** MethodOptions deprecated */ - deprecated?: (boolean|null); - - /** MethodOptions idempotencyLevel */ - idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); - - /** MethodOptions uninterpretedOption */ - uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); - - /** MethodOptions .google.api.http */ - ".google.api.http"?: (google.api.IHttpRule|null); - - /** MethodOptions .google.api.methodSignature */ - ".google.api.methodSignature"?: (string[]|null); - } - - /** Represents a MethodOptions. */ - class MethodOptions implements IMethodOptions { - - /** - * Constructs a new MethodOptions. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IMethodOptions); - - /** MethodOptions deprecated. */ - public deprecated: boolean; - - /** MethodOptions idempotencyLevel. */ - public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); - - /** MethodOptions uninterpretedOption. */ - public uninterpretedOption: google.protobuf.IUninterpretedOption[]; - - /** - * Creates a new MethodOptions instance using the specified properties. - * @param [properties] Properties to set - * @returns MethodOptions instance - */ - public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; - - /** - * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. - * @param message MethodOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. - * @param message MethodOptions message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a MethodOptions message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns MethodOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; - - /** - * Decodes a MethodOptions message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns MethodOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; - - /** - * Verifies a MethodOptions message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns MethodOptions - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; - - /** - * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. - * @param message MethodOptions - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this MethodOptions to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for MethodOptions - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace MethodOptions { - - /** IdempotencyLevel enum. */ - enum IdempotencyLevel { - IDEMPOTENCY_UNKNOWN = 0, - NO_SIDE_EFFECTS = 1, - IDEMPOTENT = 2 - } - } - - /** Properties of an UninterpretedOption. */ - interface IUninterpretedOption { - - /** UninterpretedOption name */ - name?: (google.protobuf.UninterpretedOption.INamePart[]|null); - - /** UninterpretedOption identifierValue */ - identifierValue?: (string|null); - - /** UninterpretedOption positiveIntValue */ - positiveIntValue?: (number|Long|string|null); - - /** UninterpretedOption negativeIntValue */ - negativeIntValue?: (number|Long|string|null); - - /** UninterpretedOption doubleValue */ - doubleValue?: (number|null); - - /** UninterpretedOption stringValue */ - stringValue?: (Uint8Array|string|null); - - /** UninterpretedOption aggregateValue */ - aggregateValue?: (string|null); - } - - /** Represents an UninterpretedOption. */ - class UninterpretedOption implements IUninterpretedOption { - - /** - * Constructs a new UninterpretedOption. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IUninterpretedOption); - - /** UninterpretedOption name. */ - public name: google.protobuf.UninterpretedOption.INamePart[]; - - /** UninterpretedOption identifierValue. */ - public identifierValue: string; - - /** UninterpretedOption positiveIntValue. */ - public positiveIntValue: (number|Long|string); - - /** UninterpretedOption negativeIntValue. */ - public negativeIntValue: (number|Long|string); - - /** UninterpretedOption doubleValue. */ - public doubleValue: number; - - /** UninterpretedOption stringValue. 
*/ - public stringValue: (Uint8Array|string); - - /** UninterpretedOption aggregateValue. */ - public aggregateValue: string; - - /** - * Creates a new UninterpretedOption instance using the specified properties. - * @param [properties] Properties to set - * @returns UninterpretedOption instance - */ - public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; - - /** - * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @param message UninterpretedOption message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @param message UninterpretedOption message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an UninterpretedOption message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns UninterpretedOption - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; - - /** - * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns UninterpretedOption - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; - - /** - * Verifies an UninterpretedOption message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns UninterpretedOption - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; - - /** - * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. - * @param message UninterpretedOption - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this UninterpretedOption to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for UninterpretedOption - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace UninterpretedOption { - - /** Properties of a NamePart. */ - interface INamePart { - - /** NamePart namePart */ - namePart: string; - - /** NamePart isExtension */ - isExtension: boolean; - } - - /** Represents a NamePart. */ - class NamePart implements INamePart { - - /** - * Constructs a new NamePart. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.UninterpretedOption.INamePart); - - /** NamePart namePart. */ - public namePart: string; - - /** NamePart isExtension. */ - public isExtension: boolean; - - /** - * Creates a new NamePart instance using the specified properties. - * @param [properties] Properties to set - * @returns NamePart instance - */ - public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; - - /** - * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @param message NamePart message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @param message NamePart message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a NamePart message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; - - /** - * Decodes a NamePart message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; - - /** - * Verifies a NamePart message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a NamePart message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns NamePart - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; - - /** - * Creates a plain object from a NamePart message. Also converts values to other types if specified. - * @param message NamePart - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this NamePart to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for NamePart - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of a SourceCodeInfo. */ - interface ISourceCodeInfo { - - /** SourceCodeInfo location */ - location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); - } - - /** Represents a SourceCodeInfo. */ - class SourceCodeInfo implements ISourceCodeInfo { - - /** - * Constructs a new SourceCodeInfo. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.ISourceCodeInfo); - - /** SourceCodeInfo location. */ - public location: google.protobuf.SourceCodeInfo.ILocation[]; - - /** - * Creates a new SourceCodeInfo instance using the specified properties. - * @param [properties] Properties to set - * @returns SourceCodeInfo instance - */ - public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; - - /** - * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. - * @param message SourceCodeInfo message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. - * @param message SourceCodeInfo message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SourceCodeInfo message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SourceCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; - - /** - * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns SourceCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; - - /** - * Verifies a SourceCodeInfo message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns SourceCodeInfo - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; - - /** - * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. - * @param message SourceCodeInfo - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SourceCodeInfo to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SourceCodeInfo - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace SourceCodeInfo { - - /** Properties of a Location. 
*/ - interface ILocation { - - /** Location path */ - path?: (number[]|null); - - /** Location span */ - span?: (number[]|null); - - /** Location leadingComments */ - leadingComments?: (string|null); - - /** Location trailingComments */ - trailingComments?: (string|null); - - /** Location leadingDetachedComments */ - leadingDetachedComments?: (string[]|null); - } - - /** Represents a Location. */ - class Location implements ILocation { - - /** - * Constructs a new Location. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); - - /** Location path. */ - public path: number[]; - - /** Location span. */ - public span: number[]; - - /** Location leadingComments. */ - public leadingComments: string; - - /** Location trailingComments. */ - public trailingComments: string; - - /** Location leadingDetachedComments. */ - public leadingDetachedComments: string[]; - - /** - * Creates a new Location instance using the specified properties. - * @param [properties] Properties to set - * @returns Location instance - */ - public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; - - /** - * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. - * @param message Location message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
- * @param message Location message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Location message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; - - /** - * Decodes a Location message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; - - /** - * Verifies a Location message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Location message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Location - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; - - /** - * Creates a plain object from a Location message. Also converts values to other types if specified. 
- * @param message Location - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Location to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Location - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Properties of a GeneratedCodeInfo. */ - interface IGeneratedCodeInfo { - - /** GeneratedCodeInfo annotation */ - annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); - } - - /** Represents a GeneratedCodeInfo. */ - class GeneratedCodeInfo implements IGeneratedCodeInfo { - - /** - * Constructs a new GeneratedCodeInfo. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IGeneratedCodeInfo); - - /** GeneratedCodeInfo annotation. */ - public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; - - /** - * Creates a new GeneratedCodeInfo instance using the specified properties. - * @param [properties] Properties to set - * @returns GeneratedCodeInfo instance - */ - public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; - - /** - * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @param message GeneratedCodeInfo message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified GeneratedCodeInfo message, length delimited. 
Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @param message GeneratedCodeInfo message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns GeneratedCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; - - /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns GeneratedCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; - - /** - * Verifies a GeneratedCodeInfo message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns GeneratedCodeInfo - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; - - /** - * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
- * @param message GeneratedCodeInfo - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this GeneratedCodeInfo to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for GeneratedCodeInfo - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace GeneratedCodeInfo { - - /** Properties of an Annotation. */ - interface IAnnotation { - - /** Annotation path */ - path?: (number[]|null); - - /** Annotation sourceFile */ - sourceFile?: (string|null); - - /** Annotation begin */ - begin?: (number|null); - - /** Annotation end */ - end?: (number|null); - - /** Annotation semantic */ - semantic?: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null); - } - - /** Represents an Annotation. */ - class Annotation implements IAnnotation { - - /** - * Constructs a new Annotation. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); - - /** Annotation path. */ - public path: number[]; - - /** Annotation sourceFile. */ - public sourceFile: string; - - /** Annotation begin. */ - public begin: number; - - /** Annotation end. */ - public end: number; - - /** Annotation semantic. */ - public semantic: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic); - - /** - * Creates a new Annotation instance using the specified properties. 
- * @param [properties] Properties to set - * @returns Annotation instance - */ - public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. - * @param message Annotation message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. - * @param message Annotation message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Annotation message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Decodes an Annotation message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Verifies an Annotation message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Annotation message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Annotation - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; - - /** - * Creates a plain object from an Annotation message. Also converts values to other types if specified. - * @param message Annotation - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Annotation to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Annotation - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace Annotation { - - /** Semantic enum. */ - enum Semantic { - NONE = 0, - SET = 1, - ALIAS = 2 - } - } - } - - /** Properties of a Timestamp. */ - interface ITimestamp { - - /** Timestamp seconds */ - seconds?: (number|Long|string|null); - - /** Timestamp nanos */ - nanos?: (number|null); - } - - /** Represents a Timestamp. */ - class Timestamp implements ITimestamp { - - /** - * Constructs a new Timestamp. 
- * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.ITimestamp); - - /** Timestamp seconds. */ - public seconds: (number|Long|string); - - /** Timestamp nanos. */ - public nanos: number; - - /** - * Creates a new Timestamp instance using the specified properties. - * @param [properties] Properties to set - * @returns Timestamp instance - */ - public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; - - /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @param message Timestamp message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @param message Timestamp message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Timestamp message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; - - /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; - - /** - * Verifies a Timestamp message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Timestamp - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; - - /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. - * @param message Timestamp - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Timestamp to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Timestamp - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a DoubleValue. */ - interface IDoubleValue { - - /** DoubleValue value */ - value?: (number|null); - } - - /** Represents a DoubleValue. */ - class DoubleValue implements IDoubleValue { - - /** - * Constructs a new DoubleValue. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IDoubleValue); - - /** DoubleValue value. */ - public value: number; - - /** - * Creates a new DoubleValue instance using the specified properties. 
- * @param [properties] Properties to set - * @returns DoubleValue instance - */ - public static create(properties?: google.protobuf.IDoubleValue): google.protobuf.DoubleValue; - - /** - * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. - * @param message DoubleValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. - * @param message DoubleValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a DoubleValue message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns DoubleValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DoubleValue; - - /** - * Decodes a DoubleValue message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns DoubleValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DoubleValue; - - /** - * Verifies a DoubleValue message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns DoubleValue - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.DoubleValue; - - /** - * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. - * @param message DoubleValue - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.DoubleValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this DoubleValue to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for DoubleValue - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a FloatValue. */ - interface IFloatValue { - - /** FloatValue value */ - value?: (number|null); - } - - /** Represents a FloatValue. */ - class FloatValue implements IFloatValue { - - /** - * Constructs a new FloatValue. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IFloatValue); - - /** FloatValue value. */ - public value: number; - - /** - * Creates a new FloatValue instance using the specified properties. - * @param [properties] Properties to set - * @returns FloatValue instance - */ - public static create(properties?: google.protobuf.IFloatValue): google.protobuf.FloatValue; - - /** - * Encodes the specified FloatValue message. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. 
- * @param message FloatValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. - * @param message FloatValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FloatValue message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FloatValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FloatValue; - - /** - * Decodes a FloatValue message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FloatValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FloatValue; - - /** - * Verifies a FloatValue message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns FloatValue - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FloatValue; - - /** - * Creates a plain object from a FloatValue message. Also converts values to other types if specified. - * @param message FloatValue - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FloatValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FloatValue to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FloatValue - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an Int64Value. */ - interface IInt64Value { - - /** Int64Value value */ - value?: (number|Long|string|null); - } - - /** Represents an Int64Value. */ - class Int64Value implements IInt64Value { - - /** - * Constructs a new Int64Value. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IInt64Value); - - /** Int64Value value. */ - public value: (number|Long|string); - - /** - * Creates a new Int64Value instance using the specified properties. - * @param [properties] Properties to set - * @returns Int64Value instance - */ - public static create(properties?: google.protobuf.IInt64Value): google.protobuf.Int64Value; - - /** - * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. - * @param message Int64Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Int64Value message, length delimited. 
Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. - * @param message Int64Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Int64Value message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Int64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int64Value; - - /** - * Decodes an Int64Value message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Int64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int64Value; - - /** - * Verifies an Int64Value message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Int64Value - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Int64Value; - - /** - * Creates a plain object from an Int64Value message. Also converts values to other types if specified. 
- * @param message Int64Value - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Int64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Int64Value to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Int64Value - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a UInt64Value. */ - interface IUInt64Value { - - /** UInt64Value value */ - value?: (number|Long|string|null); - } - - /** Represents a UInt64Value. */ - class UInt64Value implements IUInt64Value { - - /** - * Constructs a new UInt64Value. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IUInt64Value); - - /** UInt64Value value. */ - public value: (number|Long|string); - - /** - * Creates a new UInt64Value instance using the specified properties. - * @param [properties] Properties to set - * @returns UInt64Value instance - */ - public static create(properties?: google.protobuf.IUInt64Value): google.protobuf.UInt64Value; - - /** - * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. - * @param message UInt64Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. 
- * @param message UInt64Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a UInt64Value message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns UInt64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt64Value; - - /** - * Decodes a UInt64Value message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns UInt64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt64Value; - - /** - * Verifies a UInt64Value message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns UInt64Value - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UInt64Value; - - /** - * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. 
- * @param message UInt64Value - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.UInt64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this UInt64Value to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for UInt64Value - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an Int32Value. */ - interface IInt32Value { - - /** Int32Value value */ - value?: (number|null); - } - - /** Represents an Int32Value. */ - class Int32Value implements IInt32Value { - - /** - * Constructs a new Int32Value. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IInt32Value); - - /** Int32Value value. */ - public value: number; - - /** - * Creates a new Int32Value instance using the specified properties. - * @param [properties] Properties to set - * @returns Int32Value instance - */ - public static create(properties?: google.protobuf.IInt32Value): google.protobuf.Int32Value; - - /** - * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. - * @param message Int32Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. 
- * @param message Int32Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Int32Value message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Int32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int32Value; - - /** - * Decodes an Int32Value message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Int32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int32Value; - - /** - * Verifies an Int32Value message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Int32Value - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Int32Value; - - /** - * Creates a plain object from an Int32Value message. Also converts values to other types if specified. 
- * @param message Int32Value - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Int32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Int32Value to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Int32Value - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a UInt32Value. */ - interface IUInt32Value { - - /** UInt32Value value */ - value?: (number|null); - } - - /** Represents a UInt32Value. */ - class UInt32Value implements IUInt32Value { - - /** - * Constructs a new UInt32Value. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IUInt32Value); - - /** UInt32Value value. */ - public value: number; - - /** - * Creates a new UInt32Value instance using the specified properties. - * @param [properties] Properties to set - * @returns UInt32Value instance - */ - public static create(properties?: google.protobuf.IUInt32Value): google.protobuf.UInt32Value; - - /** - * Encodes the specified UInt32Value message. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. - * @param message UInt32Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. 
- * @param message UInt32Value message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a UInt32Value message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns UInt32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt32Value; - - /** - * Decodes a UInt32Value message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns UInt32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt32Value; - - /** - * Verifies a UInt32Value message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns UInt32Value - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.UInt32Value; - - /** - * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. 
- * @param message UInt32Value - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.UInt32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this UInt32Value to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for UInt32Value - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BoolValue. */ - interface IBoolValue { - - /** BoolValue value */ - value?: (boolean|null); - } - - /** Represents a BoolValue. */ - class BoolValue implements IBoolValue { - - /** - * Constructs a new BoolValue. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IBoolValue); - - /** BoolValue value. */ - public value: boolean; - - /** - * Creates a new BoolValue instance using the specified properties. - * @param [properties] Properties to set - * @returns BoolValue instance - */ - public static create(properties?: google.protobuf.IBoolValue): google.protobuf.BoolValue; - - /** - * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. - * @param message BoolValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. 
- * @param message BoolValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BoolValue message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BoolValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BoolValue; - - /** - * Decodes a BoolValue message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns BoolValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BoolValue; - - /** - * Verifies a BoolValue message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BoolValue - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.BoolValue; - - /** - * Creates a plain object from a BoolValue message. Also converts values to other types if specified. - * @param message BoolValue - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.BoolValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BoolValue to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BoolValue - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a StringValue. */ - interface IStringValue { - - /** StringValue value */ - value?: (string|null); - } - - /** Represents a StringValue. */ - class StringValue implements IStringValue { - - /** - * Constructs a new StringValue. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IStringValue); - - /** StringValue value. */ - public value: string; - - /** - * Creates a new StringValue instance using the specified properties. - * @param [properties] Properties to set - * @returns StringValue instance - */ - public static create(properties?: google.protobuf.IStringValue): google.protobuf.StringValue; - - /** - * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. - * @param message StringValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. - * @param message StringValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a StringValue message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns StringValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.StringValue; - - /** - * Decodes a StringValue message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns StringValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.StringValue; - - /** - * Verifies a StringValue message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a StringValue message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns StringValue - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.StringValue; - - /** - * Creates a plain object from a StringValue message. Also converts values to other types if specified. - * @param message StringValue - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.StringValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this StringValue to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for StringValue - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a BytesValue. */ - interface IBytesValue { - - /** BytesValue value */ - value?: (Uint8Array|string|null); - } - - /** Represents a BytesValue. */ - class BytesValue implements IBytesValue { - - /** - * Constructs a new BytesValue. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IBytesValue); - - /** BytesValue value. */ - public value: (Uint8Array|string); - - /** - * Creates a new BytesValue instance using the specified properties. - * @param [properties] Properties to set - * @returns BytesValue instance - */ - public static create(properties?: google.protobuf.IBytesValue): google.protobuf.BytesValue; - - /** - * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. - * @param message BytesValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. - * @param message BytesValue message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a BytesValue message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns BytesValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BytesValue; - - /** - * Decodes a BytesValue message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns BytesValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BytesValue; - - /** - * Verifies a BytesValue message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns BytesValue - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.BytesValue; - - /** - * Creates a plain object from a BytesValue message. Also converts values to other types if specified. - * @param message BytesValue - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.BytesValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this BytesValue to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for BytesValue - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an Any. */ - interface IAny { - - /** Any type_url */ - type_url?: (string|null); - - /** Any value */ - value?: (Uint8Array|string|null); - } - - /** Represents an Any. */ - class Any implements IAny { - - /** - * Constructs a new Any. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IAny); - - /** Any type_url. */ - public type_url: string; - - /** Any value. */ - public value: (Uint8Array|string); - - /** - * Creates a new Any instance using the specified properties. - * @param [properties] Properties to set - * @returns Any instance - */ - public static create(properties?: google.protobuf.IAny): google.protobuf.Any; - - /** - * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. - * @param message Any message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. - * @param message Any message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Any message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Any - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; - - /** - * Decodes an Any message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Any - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; - - /** - * Verifies an Any message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Any message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Any - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Any; - - /** - * Creates a plain object from an Any message. Also converts values to other types if specified. - * @param message Any - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Any to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Any - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of an Empty. */ - interface IEmpty { - } - - /** Represents an Empty. 
*/ - class Empty implements IEmpty { - - /** - * Constructs a new Empty. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.IEmpty); - - /** - * Creates a new Empty instance using the specified properties. - * @param [properties] Properties to set - * @returns Empty instance - */ - public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; - - /** - * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. - * @param message Empty message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. - * @param message Empty message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an Empty message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Empty - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; - - /** - * Decodes an Empty message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns Empty - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; - - /** - * Verifies an Empty message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an Empty message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Empty - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; - - /** - * Creates a plain object from an Empty message. Also converts values to other types if specified. - * @param message Empty - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Empty to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Empty - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Namespace api. */ - namespace api { - - /** Properties of a Http. */ - interface IHttp { - - /** Http rules */ - rules?: (google.api.IHttpRule[]|null); - - /** Http fullyDecodeReservedExpansion */ - fullyDecodeReservedExpansion?: (boolean|null); - } - - /** Represents a Http. */ - class Http implements IHttp { - - /** - * Constructs a new Http. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.IHttp); - - /** Http rules. 
*/ - public rules: google.api.IHttpRule[]; - - /** Http fullyDecodeReservedExpansion. */ - public fullyDecodeReservedExpansion: boolean; - - /** - * Creates a new Http instance using the specified properties. - * @param [properties] Properties to set - * @returns Http instance - */ - public static create(properties?: google.api.IHttp): google.api.Http; - - /** - * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @param message Http message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @param message Http message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Http message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Http - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; - - /** - * Decodes a Http message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Http - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; - - /** - * Verifies a Http message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Http message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Http - */ - public static fromObject(object: { [k: string]: any }): google.api.Http; - - /** - * Creates a plain object from a Http message. Also converts values to other types if specified. - * @param message Http - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Http to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Http - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a HttpRule. */ - interface IHttpRule { - - /** HttpRule selector */ - selector?: (string|null); - - /** HttpRule get */ - get?: (string|null); - - /** HttpRule put */ - put?: (string|null); - - /** HttpRule post */ - post?: (string|null); - - /** HttpRule delete */ - "delete"?: (string|null); - - /** HttpRule patch */ - patch?: (string|null); - - /** HttpRule custom */ - custom?: (google.api.ICustomHttpPattern|null); - - /** HttpRule body */ - body?: (string|null); - - /** HttpRule responseBody */ - responseBody?: (string|null); - - /** HttpRule additionalBindings */ - additionalBindings?: (google.api.IHttpRule[]|null); - } - - /** Represents a HttpRule. */ - class HttpRule implements IHttpRule { - - /** - * Constructs a new HttpRule. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.IHttpRule); - - /** HttpRule selector. 
*/ - public selector: string; - - /** HttpRule get. */ - public get?: (string|null); - - /** HttpRule put. */ - public put?: (string|null); - - /** HttpRule post. */ - public post?: (string|null); - - /** HttpRule delete. */ - public delete?: (string|null); - - /** HttpRule patch. */ - public patch?: (string|null); - - /** HttpRule custom. */ - public custom?: (google.api.ICustomHttpPattern|null); - - /** HttpRule body. */ - public body: string; - - /** HttpRule responseBody. */ - public responseBody: string; - - /** HttpRule additionalBindings. */ - public additionalBindings: google.api.IHttpRule[]; - - /** HttpRule pattern. */ - public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); - - /** - * Creates a new HttpRule instance using the specified properties. - * @param [properties] Properties to set - * @returns HttpRule instance - */ - public static create(properties?: google.api.IHttpRule): google.api.HttpRule; - - /** - * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. - * @param message HttpRule message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. - * @param message HttpRule message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a HttpRule message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns HttpRule - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; - - /** - * Decodes a HttpRule message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns HttpRule - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; - - /** - * Verifies a HttpRule message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns HttpRule - */ - public static fromObject(object: { [k: string]: any }): google.api.HttpRule; - - /** - * Creates a plain object from a HttpRule message. Also converts values to other types if specified. - * @param message HttpRule - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this HttpRule to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for HttpRule - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** Properties of a CustomHttpPattern. 
*/ - interface ICustomHttpPattern { - - /** CustomHttpPattern kind */ - kind?: (string|null); - - /** CustomHttpPattern path */ - path?: (string|null); - } - - /** Represents a CustomHttpPattern. */ - class CustomHttpPattern implements ICustomHttpPattern { - - /** - * Constructs a new CustomHttpPattern. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.ICustomHttpPattern); - - /** CustomHttpPattern kind. */ - public kind: string; - - /** CustomHttpPattern path. */ - public path: string; - - /** - * Creates a new CustomHttpPattern instance using the specified properties. - * @param [properties] Properties to set - * @returns CustomHttpPattern instance - */ - public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; - - /** - * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. - * @param message CustomHttpPattern message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. - * @param message CustomHttpPattern message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a CustomHttpPattern message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns CustomHttpPattern - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; - - /** - * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns CustomHttpPattern - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; - - /** - * Verifies a CustomHttpPattern message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns CustomHttpPattern - */ - public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; - - /** - * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. - * @param message CustomHttpPattern - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this CustomHttpPattern to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for CustomHttpPattern - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - /** FieldBehavior enum. */ - enum FieldBehavior { - FIELD_BEHAVIOR_UNSPECIFIED = 0, - OPTIONAL = 1, - REQUIRED = 2, - OUTPUT_ONLY = 3, - INPUT_ONLY = 4, - IMMUTABLE = 5, - UNORDERED_LIST = 6, - NON_EMPTY_DEFAULT = 7 - } - - /** Properties of a ResourceDescriptor. */ - interface IResourceDescriptor { - - /** ResourceDescriptor type */ - type?: (string|null); - - /** ResourceDescriptor pattern */ - pattern?: (string[]|null); - - /** ResourceDescriptor nameField */ - nameField?: (string|null); - - /** ResourceDescriptor history */ - history?: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History|null); - - /** ResourceDescriptor plural */ - plural?: (string|null); - - /** ResourceDescriptor singular */ - singular?: (string|null); - - /** ResourceDescriptor style */ - style?: (google.api.ResourceDescriptor.Style[]|null); - } - - /** Represents a ResourceDescriptor. */ - class ResourceDescriptor implements IResourceDescriptor { - - /** - * Constructs a new ResourceDescriptor. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.IResourceDescriptor); - - /** ResourceDescriptor type. */ - public type: string; - - /** ResourceDescriptor pattern. */ - public pattern: string[]; - - /** ResourceDescriptor nameField. */ - public nameField: string; - - /** ResourceDescriptor history. */ - public history: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History); - - /** ResourceDescriptor plural. */ - public plural: string; - - /** ResourceDescriptor singular. */ - public singular: string; - - /** ResourceDescriptor style. 
*/ - public style: google.api.ResourceDescriptor.Style[]; - - /** - * Creates a new ResourceDescriptor instance using the specified properties. - * @param [properties] Properties to set - * @returns ResourceDescriptor instance - */ - public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; - - /** - * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. - * @param message ResourceDescriptor message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. - * @param message ResourceDescriptor message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ResourceDescriptor message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ResourceDescriptor - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; - - /** - * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. 
- * @param reader Reader or buffer to decode from - * @returns ResourceDescriptor - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; - - /** - * Verifies a ResourceDescriptor message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ResourceDescriptor - */ - public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; - - /** - * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. - * @param message ResourceDescriptor - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ResourceDescriptor to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ResourceDescriptor - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace ResourceDescriptor { - - /** History enum. */ - enum History { - HISTORY_UNSPECIFIED = 0, - ORIGINALLY_SINGLE_PATTERN = 1, - FUTURE_MULTI_PATTERN = 2 - } - - /** Style enum. */ - enum Style { - STYLE_UNSPECIFIED = 0, - DECLARATIVE_FRIENDLY = 1 - } - } - - /** Properties of a ResourceReference. 
*/ - interface IResourceReference { - - /** ResourceReference type */ - type?: (string|null); - - /** ResourceReference childType */ - childType?: (string|null); - } - - /** Represents a ResourceReference. */ - class ResourceReference implements IResourceReference { - - /** - * Constructs a new ResourceReference. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.IResourceReference); - - /** ResourceReference type. */ - public type: string; - - /** ResourceReference childType. */ - public childType: string; - - /** - * Creates a new ResourceReference instance using the specified properties. - * @param [properties] Properties to set - * @returns ResourceReference instance - */ - public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; - - /** - * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. - * @param message ResourceReference message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. - * @param message ResourceReference message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a ResourceReference message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ResourceReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; - - /** - * Decodes a ResourceReference message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ResourceReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; - - /** - * Verifies a ResourceReference message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ResourceReference - */ - public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; - - /** - * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. - * @param message ResourceReference - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ResourceReference to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ResourceReference - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - - /** Namespace rpc. */ - namespace rpc { - - /** Properties of a Status. */ - interface IStatus { - - /** Status code */ - code?: (number|null); - - /** Status message */ - message?: (string|null); - - /** Status details */ - details?: (google.protobuf.IAny[]|null); - } - - /** Represents a Status. */ - class Status implements IStatus { - - /** - * Constructs a new Status. - * @param [properties] Properties to set - */ - constructor(properties?: google.rpc.IStatus); - - /** Status code. */ - public code: number; - - /** Status message. */ - public message: string; - - /** Status details. */ - public details: google.protobuf.IAny[]; - - /** - * Creates a new Status instance using the specified properties. - * @param [properties] Properties to set - * @returns Status instance - */ - public static create(properties?: google.rpc.IStatus): google.rpc.Status; - - /** - * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. - * @param message Status message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. - * @param message Status message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Status message from the specified reader or buffer. 
- * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Status - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.rpc.Status; - - /** - * Decodes a Status message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Status - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.rpc.Status; - - /** - * Verifies a Status message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Status message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Status - */ - public static fromObject(object: { [k: string]: any }): google.rpc.Status; - - /** - * Creates a plain object from a Status message. Also converts values to other types if specified. - * @param message Status - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.rpc.Status, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Status to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Status - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } -} diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js deleted file mode 100644 index 53da7298f1a..00000000000 --- a/handwritten/bigquery-storage/protos/protos.js +++ /dev/null @@ -1,29973 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/ -(function(global, factory) { /* global define, require, module */ - - /* AMD */ if (typeof define === 'function' && define.amd) - define(["protobufjs/minimal"], factory); - - /* CommonJS */ else if (typeof require === 'function' && typeof module === 'object' && module && module.exports) - module.exports = factory(require("google-gax/build/src/protobuf").protobufMinimal); - -})(this, function($protobuf) { - "use strict"; - - // Common aliases - var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; - - // Exported root namespace - var $root = $protobuf.roots._google_cloud_bigquery_storage_protos || ($protobuf.roots._google_cloud_bigquery_storage_protos = {}); - - $root.google = (function() { - - /** - * Namespace google. - * @exports google - * @namespace - */ - var google = {}; - - google.cloud = (function() { - - /** - * Namespace cloud. - * @memberof google - * @namespace - */ - var cloud = {}; - - cloud.bigquery = (function() { - - /** - * Namespace bigquery. - * @memberof google.cloud - * @namespace - */ - var bigquery = {}; - - bigquery.storage = (function() { - - /** - * Namespace storage. - * @memberof google.cloud.bigquery - * @namespace - */ - var storage = {}; - - storage.v1 = (function() { - - /** - * Namespace v1. - * @memberof google.cloud.bigquery.storage - * @namespace - */ - var v1 = {}; - - v1.ArrowSchema = (function() { - - /** - * Properties of an ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IArrowSchema - * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema - */ - - /** - * Constructs a new ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an ArrowSchema. 
- * @implements IArrowSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set - */ - function ArrowSchema(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ArrowSchema serializedSchema. - * @member {Uint8Array} serializedSchema - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @instance - */ - ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); - - /** - * Creates a new ArrowSchema instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema instance - */ - ArrowSchema.create = function create(properties) { - return new ArrowSchema(properties); - }; - - /** - * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); - return writer; - }; - - /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedSchema = reader.bytes(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ArrowSchema message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ArrowSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) - return "serializedSchema: buffer expected"; - return null; - }; - - /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema - */ - ArrowSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); - if (object.serializedSchema != null) - if (typeof object.serializedSchema === "string") - $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); - else if (object.serializedSchema.length >= 0) - message.serializedSchema = object.serializedSchema; - return message; - }; - - /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ArrowSchema} message ArrowSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ArrowSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if (options.bytes === String) - object.serializedSchema = ""; - else { - object.serializedSchema = []; - if (options.bytes !== Array) - object.serializedSchema = $util.newBuffer(object.serializedSchema); - } - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; - return object; - }; - - /** - * Converts this ArrowSchema to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @instance - * @returns {Object.} JSON object - */ - ArrowSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ArrowSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ArrowSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSchema"; - }; - - return ArrowSchema; - })(); - - v1.ArrowRecordBatch = (function() { - - /** - * Properties of an ArrowRecordBatch. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IArrowRecordBatch - * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch - * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount - */ - - /** - * Constructs a new ArrowRecordBatch. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an ArrowRecordBatch. - * @implements IArrowRecordBatch - * @constructor - * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set - */ - function ArrowRecordBatch(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ArrowRecordBatch serializedRecordBatch. - * @member {Uint8Array} serializedRecordBatch - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @instance - */ - ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); - - /** - * ArrowRecordBatch rowCount. 
- * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @instance - */ - ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new ArrowRecordBatch instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch instance - */ - ArrowRecordBatch.create = function create(properties) { - return new ArrowRecordBatch(properties); - }; - - /** - * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowRecordBatch.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowRecordBatch.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedRecordBatch = reader.bytes(); - break; - } - case 2: { - message.rowCount = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ArrowRecordBatch message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ArrowRecordBatch.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) - return "serializedRecordBatch: buffer expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - return null; - }; - - /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch - */ - ArrowRecordBatch.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); - if (object.serializedRecordBatch != null) - if (typeof object.serializedRecordBatch === "string") - $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); - else if (object.serializedRecordBatch.length >= 0) - message.serializedRecordBatch = object.serializedRecordBatch; - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1.ArrowRecordBatch} message ArrowRecordBatch - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ArrowRecordBatch.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedRecordBatch = ""; - else { - object.serializedRecordBatch = []; - if (options.bytes !== Array) - object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); - } - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; - } - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this ArrowRecordBatch to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @instance - * @returns {Object.} JSON object - */ - ArrowRecordBatch.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ArrowRecordBatch - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowRecordBatch"; - }; - - return ArrowRecordBatch; - })(); - - v1.ArrowSerializationOptions = (function() { - - /** - * Properties of an ArrowSerializationOptions. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IArrowSerializationOptions - * @property {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null} [bufferCompression] ArrowSerializationOptions bufferCompression - */ - - /** - * Constructs a new ArrowSerializationOptions. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an ArrowSerializationOptions. - * @implements IArrowSerializationOptions - * @constructor - * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set - */ - function ArrowSerializationOptions(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ArrowSerializationOptions bufferCompression. 
- * @member {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec} bufferCompression - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @instance - */ - ArrowSerializationOptions.prototype.bufferCompression = 0; - - /** - * Creates a new ArrowSerializationOptions instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions instance - */ - ArrowSerializationOptions.create = function create(properties) { - return new ArrowSerializationOptions(properties); - }; - - /** - * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSerializationOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.bufferCompression != null && Object.hasOwnProperty.call(message, "bufferCompression")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.bufferCompression); - return writer; - }; - - /** - * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ArrowSerializationOptions message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSerializationOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - message.bufferCompression = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSerializationOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ArrowSerializationOptions message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ArrowSerializationOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) - switch (message.bufferCompression) { - default: - return "bufferCompression: enum value expected"; - case 0: - case 1: - case 2: - break; - } - return null; - }; - - /** - * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions - */ - ArrowSerializationOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); - switch (object.bufferCompression) { - default: - if (typeof object.bufferCompression === "number") { - message.bufferCompression = object.bufferCompression; - break; - } - break; - case "COMPRESSION_UNSPECIFIED": - case 0: - message.bufferCompression = 0; - break; - case "LZ4_FRAME": - case 1: - message.bufferCompression = 1; - break; - case "ZSTD": - case 2: - message.bufferCompression = 2; - break; - } - return message; - }; - - /** - * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} message ArrowSerializationOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ArrowSerializationOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.bufferCompression = options.enums === String ? "COMPRESSION_UNSPECIFIED" : 0; - if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) - object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] === undefined ? 
message.bufferCompression : $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; - return object; - }; - - /** - * Converts this ArrowSerializationOptions to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @instance - * @returns {Object.} JSON object - */ - ArrowSerializationOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ArrowSerializationOptions - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ArrowSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSerializationOptions"; - }; - - /** - * CompressionCodec enum. - * @name google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec - * @enum {number} - * @property {number} COMPRESSION_UNSPECIFIED=0 COMPRESSION_UNSPECIFIED value - * @property {number} LZ4_FRAME=1 LZ4_FRAME value - * @property {number} ZSTD=2 ZSTD value - */ - ArrowSerializationOptions.CompressionCodec = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "COMPRESSION_UNSPECIFIED"] = 0; - values[valuesById[1] = "LZ4_FRAME"] = 1; - values[valuesById[2] = "ZSTD"] = 2; - return values; - })(); - - return ArrowSerializationOptions; - })(); - - v1.AvroSchema = (function() { - - /** - * Properties of an AvroSchema. 
- * @memberof google.cloud.bigquery.storage.v1 - * @interface IAvroSchema - * @property {string|null} [schema] AvroSchema schema - */ - - /** - * Constructs a new AvroSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an AvroSchema. - * @implements IAvroSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set - */ - function AvroSchema(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AvroSchema schema. - * @member {string} schema - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @instance - */ - AvroSchema.prototype.schema = ""; - - /** - * Creates a new AvroSchema instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema instance - */ - AvroSchema.create = function create(properties) { - return new AvroSchema(properties); - }; - - /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); - return writer; - }; - - /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AvroSchema message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.schema = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AvroSchema message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AvroSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.schema != null && message.hasOwnProperty("schema")) - if (!$util.isString(message.schema)) - return "schema: string expected"; - return null; - }; - - /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema - */ - AvroSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); - if (object.schema != null) - message.schema = String(object.schema); - return message; - }; - - /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1.AvroSchema} message AvroSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AvroSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.schema = ""; - if (message.schema != null && message.hasOwnProperty("schema")) - object.schema = message.schema; - return object; - }; - - /** - * Converts this AvroSchema to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @instance - * @returns {Object.} JSON object - */ - AvroSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AvroSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AvroSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSchema"; - }; - - return AvroSchema; - })(); - - v1.AvroRows = (function() { - - /** - * Properties of an AvroRows. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IAvroRows - * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows - * @property {number|Long|null} [rowCount] AvroRows rowCount - */ - - /** - * Constructs a new AvroRows. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an AvroRows. - * @implements IAvroRows - * @constructor - * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set - */ - function AvroRows(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AvroRows serializedBinaryRows. - * @member {Uint8Array} serializedBinaryRows - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @instance - */ - AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); - - /** - * AvroRows rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @instance - */ - AvroRows.prototype.rowCount = $util.Long ? 
$util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new AvroRows instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows instance - */ - AvroRows.create = function create(properties) { - return new AvroRows(properties); - }; - - /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroRows.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroRows.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AvroRows message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroRows.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedBinaryRows = reader.bytes(); - break; - } - case 2: { - message.rowCount = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroRows.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AvroRows message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AvroRows.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) - return "serializedBinaryRows: buffer expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - return null; - }; - - /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows - */ - AvroRows.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroRows) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); - if (object.serializedBinaryRows != null) - if (typeof object.serializedBinaryRows === "string") - $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); - else if (object.serializedBinaryRows.length >= 0) - message.serializedBinaryRows = object.serializedBinaryRows; - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1.AvroRows} message AvroRows - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AvroRows.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedBinaryRows = ""; - else { - object.serializedBinaryRows = []; - if (options.bytes !== Array) - object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); - } - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; - } - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this AvroRows to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @instance - * @returns {Object.} JSON object - */ - AvroRows.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AvroRows - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AvroRows - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroRows"; - }; - - return AvroRows; - })(); - - v1.AvroSerializationOptions = (function() { - - /** - * Properties of an AvroSerializationOptions. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IAvroSerializationOptions - * @property {boolean|null} [enableDisplayNameAttribute] AvroSerializationOptions enableDisplayNameAttribute - */ - - /** - * Constructs a new AvroSerializationOptions. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an AvroSerializationOptions. - * @implements IAvroSerializationOptions - * @constructor - * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set - */ - function AvroSerializationOptions(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AvroSerializationOptions enableDisplayNameAttribute. 
- * @member {boolean} enableDisplayNameAttribute - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @instance - */ - AvroSerializationOptions.prototype.enableDisplayNameAttribute = false; - - /** - * Creates a new AvroSerializationOptions instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions instance - */ - AvroSerializationOptions.create = function create(properties) { - return new AvroSerializationOptions(properties); - }; - - /** - * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSerializationOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.enableDisplayNameAttribute != null && Object.hasOwnProperty.call(message, "enableDisplayNameAttribute")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.enableDisplayNameAttribute); - return writer; - }; - - /** - * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AvroSerializationOptions message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSerializationOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.enableDisplayNameAttribute = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSerializationOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AvroSerializationOptions message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AvroSerializationOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) - if (typeof message.enableDisplayNameAttribute !== "boolean") - return "enableDisplayNameAttribute: boolean expected"; - return null; - }; - - /** - * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions - */ - AvroSerializationOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); - if (object.enableDisplayNameAttribute != null) - message.enableDisplayNameAttribute = Boolean(object.enableDisplayNameAttribute); - return message; - }; - - /** - * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {google.cloud.bigquery.storage.v1.AvroSerializationOptions} message AvroSerializationOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AvroSerializationOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.enableDisplayNameAttribute = false; - if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) - object.enableDisplayNameAttribute = message.enableDisplayNameAttribute; - return object; - }; - - /** - * Converts this AvroSerializationOptions to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @instance - * @returns {Object.} JSON object - */ - AvroSerializationOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AvroSerializationOptions - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AvroSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSerializationOptions"; - }; - - return AvroSerializationOptions; - })(); - - v1.ProtoSchema = (function() { - - /** - * Properties of a ProtoSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IProtoSchema - * @property {google.protobuf.IDescriptorProto|null} [protoDescriptor] ProtoSchema protoDescriptor - */ - - /** - * Constructs a new ProtoSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ProtoSchema. - * @implements IProtoSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set - */ - function ProtoSchema(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ProtoSchema protoDescriptor. - * @member {google.protobuf.IDescriptorProto|null|undefined} protoDescriptor - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @instance - */ - ProtoSchema.prototype.protoDescriptor = null; - - /** - * Creates a new ProtoSchema instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema instance - */ - ProtoSchema.create = function create(properties) { - return new ProtoSchema(properties); - }; - - /** - * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.protoDescriptor != null && Object.hasOwnProperty.call(message, "protoDescriptor")) - $root.google.protobuf.DescriptorProto.encode(message.protoDescriptor, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ProtoSchema message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ProtoSchema message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ProtoSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) { - var error = $root.google.protobuf.DescriptorProto.verify(message.protoDescriptor); - if (error) - return "protoDescriptor." + error; - } - return null; - }; - - /** - * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema - */ - ProtoSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); - if (object.protoDescriptor != null) { - if (typeof object.protoDescriptor !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ProtoSchema.protoDescriptor: object expected"); - message.protoDescriptor = $root.google.protobuf.DescriptorProto.fromObject(object.protoDescriptor); - } - return message; - }; - - /** - * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ProtoSchema} message ProtoSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ProtoSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.protoDescriptor = null; - if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) - object.protoDescriptor = $root.google.protobuf.DescriptorProto.toObject(message.protoDescriptor, options); - return object; - }; - - /** - * Converts this ProtoSchema to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @instance - * @returns {Object.} JSON object - */ - ProtoSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ProtoSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ProtoSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ProtoSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoSchema"; - }; - - return ProtoSchema; - })(); - - v1.ProtoRows = (function() { - - /** - * Properties of a ProtoRows. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IProtoRows - * @property {Array.|null} [serializedRows] ProtoRows serializedRows - */ - - /** - * Constructs a new ProtoRows. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ProtoRows. 
- * @implements IProtoRows - * @constructor - * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set - */ - function ProtoRows(properties) { - this.serializedRows = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ProtoRows serializedRows. - * @member {Array.} serializedRows - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @instance - */ - ProtoRows.prototype.serializedRows = $util.emptyArray; - - /** - * Creates a new ProtoRows instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows instance - */ - ProtoRows.create = function create(properties) { - return new ProtoRows(properties); - }; - - /** - * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoRows.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedRows != null && message.serializedRows.length) - for (var i = 0; i < message.serializedRows.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRows[i]); - return writer; - }; - - /** - * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoRows.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ProtoRows message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoRows.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.serializedRows && message.serializedRows.length)) - message.serializedRows = []; - message.serializedRows.push(reader.bytes()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ProtoRows message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoRows.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ProtoRows message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ProtoRows.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedRows != null && message.hasOwnProperty("serializedRows")) { - if (!Array.isArray(message.serializedRows)) - return "serializedRows: array expected"; - for (var i = 0; i < message.serializedRows.length; ++i) - if (!(message.serializedRows[i] && typeof message.serializedRows[i].length === "number" || $util.isString(message.serializedRows[i]))) - return "serializedRows: buffer[] expected"; - } - return null; - }; - - /** - * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows - */ - ProtoRows.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoRows) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); - if (object.serializedRows) { - if (!Array.isArray(object.serializedRows)) - throw TypeError(".google.cloud.bigquery.storage.v1.ProtoRows.serializedRows: array expected"); - message.serializedRows = []; - for (var i = 0; i < object.serializedRows.length; ++i) - if (typeof object.serializedRows[i] === "string") - $util.base64.decode(object.serializedRows[i], message.serializedRows[i] = $util.newBuffer($util.base64.length(object.serializedRows[i])), 0); - else if (object.serializedRows[i].length >= 0) - message.serializedRows[i] = object.serializedRows[i]; - } - return message; - }; - - /** - * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {google.cloud.bigquery.storage.v1.ProtoRows} message ProtoRows - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ProtoRows.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.serializedRows = []; - if (message.serializedRows && message.serializedRows.length) { - object.serializedRows = []; - for (var j = 0; j < message.serializedRows.length; ++j) - object.serializedRows[j] = options.bytes === String ? $util.base64.encode(message.serializedRows[j], 0, message.serializedRows[j].length) : options.bytes === Array ? 
Array.prototype.slice.call(message.serializedRows[j]) : message.serializedRows[j]; - } - return object; - }; - - /** - * Converts this ProtoRows to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @instance - * @returns {Object.} JSON object - */ - ProtoRows.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ProtoRows - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ProtoRows - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ProtoRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoRows"; - }; - - return ProtoRows; - })(); - - v1.BigQueryRead = (function() { - - /** - * Constructs a new BigQueryRead service. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a BigQueryRead - * @extends $protobuf.rpc.Service - * @constructor - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - */ - function BigQueryRead(rpcImpl, requestDelimited, responseDelimited) { - $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); - } - - (BigQueryRead.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryRead; - - /** - * Creates new BigQueryRead service using the specified rpc implementation. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @static - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - * @returns {BigQueryRead} RPC service. Useful where requests and/or responses are streamed. - */ - BigQueryRead.create = function create(rpcImpl, requestDelimited, responseDelimited) { - return new this(rpcImpl, requestDelimited, responseDelimited); - }; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @typedef CreateReadSessionCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.ReadSession} [response] ReadSession - */ - - /** - * Calls CreateReadSession. - * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryRead.prototype.createReadSession = function createReadSession(request, callback) { - return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1.ReadSession, request, callback); - }, "name", { value: "CreateReadSession" }); - - /** - * Calls CreateReadSession. 
- * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @typedef ReadRowsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} [response] ReadRowsResponse - */ - - /** - * Calls ReadRows. - * @function readRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryRead.prototype.readRows = function readRows(request, callback) { - return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1.ReadRowsResponse, request, callback); - }, "name", { value: "ReadRows" }); - - /** - * Calls ReadRows. - * @function readRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. 
- * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @typedef SplitReadStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} [response] SplitReadStreamResponse - */ - - /** - * Calls SplitReadStream. - * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryRead.prototype.splitReadStream = function splitReadStream(request, callback) { - return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse, request, callback); - }, "name", { value: "SplitReadStream" }); - - /** - * Calls SplitReadStream. - * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryRead - * @instance - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - return BigQueryRead; - })(); - - v1.BigQueryWrite = (function() { - - /** - * Constructs a new BigQueryWrite service. 
- * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a BigQueryWrite - * @extends $protobuf.rpc.Service - * @constructor - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - */ - function BigQueryWrite(rpcImpl, requestDelimited, responseDelimited) { - $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); - } - - (BigQueryWrite.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryWrite; - - /** - * Creates new BigQueryWrite service using the specified rpc implementation. - * @function create - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @static - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - * @returns {BigQueryWrite} RPC service. Useful where requests and/or responses are streamed. - */ - BigQueryWrite.create = function create(rpcImpl, requestDelimited, responseDelimited) { - return new this(rpcImpl, requestDelimited, responseDelimited); - }; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef CreateWriteStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream - */ - - /** - * Calls CreateWriteStream. 
- * @function createWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.createWriteStream = function createWriteStream(request, callback) { - return this.rpcCall(createWriteStream, $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); - }, "name", { value: "CreateWriteStream" }); - - /** - * Calls CreateWriteStream. - * @function createWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef AppendRowsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} [response] AppendRowsResponse - */ - - /** - * Calls AppendRows. 
- * @function appendRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback} callback Node-style callback called with the error, if any, and AppendRowsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.appendRows = function appendRows(request, callback) { - return this.rpcCall(appendRows, $root.google.cloud.bigquery.storage.v1.AppendRowsRequest, $root.google.cloud.bigquery.storage.v1.AppendRowsResponse, request, callback); - }, "name", { value: "AppendRows" }); - - /** - * Calls AppendRows. - * @function appendRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef GetWriteStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream - */ - - /** - * Calls GetWriteStream. 
- * @function getWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.getWriteStream = function getWriteStream(request, callback) { - return this.rpcCall(getWriteStream, $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); - }, "name", { value: "GetWriteStream" }); - - /** - * Calls GetWriteStream. - * @function getWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef FinalizeWriteStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} [response] FinalizeWriteStreamResponse - */ - - /** - * Calls FinalizeWriteStream. 
- * @function finalizeWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback} callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.finalizeWriteStream = function finalizeWriteStream(request, callback) { - return this.rpcCall(finalizeWriteStream, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, request, callback); - }, "name", { value: "FinalizeWriteStream" }); - - /** - * Calls FinalizeWriteStream. - * @function finalizeWriteStream - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef BatchCommitWriteStreamsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} [response] BatchCommitWriteStreamsResponse - */ - - /** - * Calls BatchCommitWriteStreams. 
- * @function batchCommitWriteStreams - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback} callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.batchCommitWriteStreams = function batchCommitWriteStreams(request, callback) { - return this.rpcCall(batchCommitWriteStreams, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, request, callback); - }, "name", { value: "BatchCommitWriteStreams" }); - - /** - * Calls BatchCommitWriteStreams. - * @function batchCommitWriteStreams - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @typedef FlushRowsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} [response] FlushRowsResponse - */ - - /** - * Calls FlushRows. 
- * @function flushRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback} callback Node-style callback called with the error, if any, and FlushRowsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryWrite.prototype.flushRows = function flushRows(request, callback) { - return this.rpcCall(flushRows, $root.google.cloud.bigquery.storage.v1.FlushRowsRequest, $root.google.cloud.bigquery.storage.v1.FlushRowsResponse, request, callback); - }, "name", { value: "FlushRows" }); - - /** - * Calls FlushRows. - * @function flushRows - * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite - * @instance - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - return BigQueryWrite; - })(); - - v1.CreateReadSessionRequest = (function() { - - /** - * Properties of a CreateReadSessionRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ICreateReadSessionRequest - * @property {string|null} [parent] CreateReadSessionRequest parent - * @property {google.cloud.bigquery.storage.v1.IReadSession|null} [readSession] CreateReadSessionRequest readSession - * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount - * @property {number|null} [preferredMinStreamCount] CreateReadSessionRequest preferredMinStreamCount - */ - - /** - * Constructs a new CreateReadSessionRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a CreateReadSessionRequest. 
- * @implements ICreateReadSessionRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set - */ - function CreateReadSessionRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * CreateReadSessionRequest parent. - * @member {string} parent - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.parent = ""; - - /** - * CreateReadSessionRequest readSession. - * @member {google.cloud.bigquery.storage.v1.IReadSession|null|undefined} readSession - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.readSession = null; - - /** - * CreateReadSessionRequest maxStreamCount. - * @member {number} maxStreamCount - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.maxStreamCount = 0; - - /** - * CreateReadSessionRequest preferredMinStreamCount. - * @member {number} preferredMinStreamCount - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.preferredMinStreamCount = 0; - - /** - * Creates a new CreateReadSessionRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest instance - */ - CreateReadSessionRequest.create = function create(properties) { - return new CreateReadSessionRequest(properties); - }; - - /** - * Encodes the specified CreateReadSessionRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateReadSessionRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.readSession != null && Object.hasOwnProperty.call(message, "readSession")) - $root.google.cloud.bigquery.storage.v1.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.maxStreamCount != null && Object.hasOwnProperty.call(message, "maxStreamCount")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount); - if (message.preferredMinStreamCount != null && Object.hasOwnProperty.call(message, "preferredMinStreamCount")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.preferredMinStreamCount); - return writer; - }; - - /** - * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateReadSessionRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.parent = reader.string(); - break; - } - case 2: { - message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.decode(reader, reader.uint32()); - break; - } - case 3: { - message.maxStreamCount = reader.int32(); - break; - } - case 4: { - message.preferredMinStreamCount = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a CreateReadSessionRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - CreateReadSessionRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.parent != null && message.hasOwnProperty("parent")) - if (!$util.isString(message.parent)) - return "parent: string expected"; - if (message.readSession != null && message.hasOwnProperty("readSession")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadSession.verify(message.readSession); - if (error) - return "readSession." + error; - } - if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) - if (!$util.isInteger(message.maxStreamCount)) - return "maxStreamCount: integer expected"; - if (message.preferredMinStreamCount != null && message.hasOwnProperty("preferredMinStreamCount")) - if (!$util.isInteger(message.preferredMinStreamCount)) - return "preferredMinStreamCount: integer expected"; - return null; - }; - - /** - * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest - */ - CreateReadSessionRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); - if (object.parent != null) - message.parent = String(object.parent); - if (object.readSession != null) { - if (typeof object.readSession !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.CreateReadSessionRequest.readSession: object expected"); - message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.fromObject(object.readSession); - } - if (object.maxStreamCount != null) - message.maxStreamCount = object.maxStreamCount | 0; - if (object.preferredMinStreamCount != null) - message.preferredMinStreamCount = object.preferredMinStreamCount | 0; - return message; - }; - - /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} message CreateReadSessionRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - CreateReadSessionRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.parent = ""; - object.readSession = null; - object.maxStreamCount = 0; - object.preferredMinStreamCount = 0; - } - if (message.parent != null && message.hasOwnProperty("parent")) - object.parent = message.parent; - if (message.readSession != null && message.hasOwnProperty("readSession")) - object.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.toObject(message.readSession, options); - if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) - object.maxStreamCount = message.maxStreamCount; - if (message.preferredMinStreamCount != null && message.hasOwnProperty("preferredMinStreamCount")) - object.preferredMinStreamCount = message.preferredMinStreamCount; - return object; - }; - - /** - * Converts this CreateReadSessionRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @instance - * @returns {Object.} JSON object - */ - CreateReadSessionRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for CreateReadSessionRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateReadSessionRequest"; - }; - - return CreateReadSessionRequest; - })(); - - v1.ReadRowsRequest = (function() { - - /** - * Properties of a ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadRowsRequest - * @property {string|null} [readStream] ReadRowsRequest readStream - * @property {number|Long|null} [offset] ReadRowsRequest offset - */ - - /** - * Constructs a new ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadRowsRequest. - * @implements IReadRowsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set - */ - function ReadRowsRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadRowsRequest readStream. - * @member {string} readStream - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @instance - */ - ReadRowsRequest.prototype.readStream = ""; - - /** - * ReadRowsRequest offset. 
- * @member {number|Long} offset - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @instance - */ - ReadRowsRequest.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new ReadRowsRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest instance - */ - ReadRowsRequest.create = function create(properties) { - return new ReadRowsRequest(properties); - }; - - /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.readStream != null && Object.hasOwnProperty.call(message, "readStream")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.readStream); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); - return writer; - }; - - /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.readStream = reader.string(); - break; - } - case 2: { - message.offset = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadRowsRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadRowsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.readStream != null && message.hasOwnProperty("readStream")) - if (!$util.isString(message.readStream)) - return "readStream: string expected"; - if (message.offset != null && message.hasOwnProperty("offset")) - if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) - return "offset: integer|Long expected"; - return null; - }; - - /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest - */ - ReadRowsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); - if (object.readStream != null) - message.readStream = String(object.readStream); - if (object.offset != null) - if ($util.Long) - (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; - else if (typeof object.offset === "string") - message.offset = parseInt(object.offset, 10); - else if (typeof object.offset === "number") - message.offset = object.offset; - else if (typeof object.offset === "object") - message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ReadRowsRequest} message ReadRowsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadRowsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.readStream = ""; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.offset = options.longs === String ? 
"0" : 0; - } - if (message.readStream != null && message.hasOwnProperty("readStream")) - object.readStream = message.readStream; - if (message.offset != null && message.hasOwnProperty("offset")) - if (typeof message.offset === "number") - object.offset = options.longs === String ? String(message.offset) : message.offset; - else - object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; - return object; - }; - - /** - * Converts this ReadRowsRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @instance - * @returns {Object.} JSON object - */ - ReadRowsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadRowsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsRequest"; - }; - - return ReadRowsRequest; - })(); - - v1.ThrottleState = (function() { - - /** - * Properties of a ThrottleState. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IThrottleState - * @property {number|null} [throttlePercent] ThrottleState throttlePercent - */ - - /** - * Constructs a new ThrottleState. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ThrottleState. 
- * @implements IThrottleState - * @constructor - * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set - */ - function ThrottleState(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ThrottleState throttlePercent. - * @member {number} throttlePercent - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @instance - */ - ThrottleState.prototype.throttlePercent = 0; - - /** - * Creates a new ThrottleState instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState instance - */ - ThrottleState.create = function create(properties) { - return new ThrottleState(properties); - }; - - /** - * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ThrottleState.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); - return writer; - }; - - /** - * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ThrottleState.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ThrottleState message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ThrottleState.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.throttlePercent = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ThrottleState message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ThrottleState.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ThrottleState message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ThrottleState.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - if (!$util.isInteger(message.throttlePercent)) - return "throttlePercent: integer expected"; - return null; - }; - - /** - * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState - */ - ThrottleState.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ThrottleState) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); - if (object.throttlePercent != null) - message.throttlePercent = object.throttlePercent | 0; - return message; - }; - - /** - * Creates a plain object from a ThrottleState message. 
Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {google.cloud.bigquery.storage.v1.ThrottleState} message ThrottleState - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ThrottleState.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.throttlePercent = 0; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - object.throttlePercent = message.throttlePercent; - return object; - }; - - /** - * Converts this ThrottleState to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @instance - * @returns {Object.} JSON object - */ - ThrottleState.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ThrottleState - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ThrottleState - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ThrottleState.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ThrottleState"; - }; - - return ThrottleState; - })(); - - v1.StreamStats = (function() { - - /** - * Properties of a StreamStats. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IStreamStats - * @property {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null} [progress] StreamStats progress - */ - - /** - * Constructs a new StreamStats. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a StreamStats. 
- * @implements IStreamStats - * @constructor - * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set - */ - function StreamStats(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * StreamStats progress. - * @member {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null|undefined} progress - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @instance - */ - StreamStats.prototype.progress = null; - - /** - * Creates a new StreamStats instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats instance - */ - StreamStats.create = function create(properties) { - return new StreamStats(properties); - }; - - /** - * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamStats.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) - $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.encode(message.progress, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified StreamStats message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamStats.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a StreamStats message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamStats.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a StreamStats message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamStats.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a StreamStats message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - StreamStats.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.progress != null && message.hasOwnProperty("progress")) { - var error = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.verify(message.progress); - if (error) - return "progress." + error; - } - return null; - }; - - /** - * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats - */ - StreamStats.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); - if (object.progress != null) { - if (typeof object.progress !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.StreamStats.progress: object expected"); - message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.fromObject(object.progress); - } - return message; - }; - - /** - * Creates a plain object from a StreamStats message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {google.cloud.bigquery.storage.v1.StreamStats} message StreamStats - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - StreamStats.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.progress = null; - if (message.progress != null && message.hasOwnProperty("progress")) - object.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.toObject(message.progress, options); - return object; - }; - - /** - * Converts this StreamStats to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @instance - * @returns {Object.} JSON object - */ - StreamStats.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for StreamStats - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - StreamStats.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats"; - }; - - StreamStats.Progress = (function() { - - /** - * Properties of a Progress. - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @interface IProgress - * @property {number|null} [atResponseStart] Progress atResponseStart - * @property {number|null} [atResponseEnd] Progress atResponseEnd - */ - - /** - * Constructs a new Progress. - * @memberof google.cloud.bigquery.storage.v1.StreamStats - * @classdesc Represents a Progress. - * @implements IProgress - * @constructor - * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set - */ - function Progress(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Progress atResponseStart. - * @member {number} atResponseStart - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @instance - */ - Progress.prototype.atResponseStart = 0; - - /** - * Progress atResponseEnd. 
- * @member {number} atResponseEnd - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @instance - */ - Progress.prototype.atResponseEnd = 0; - - /** - * Creates a new Progress instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress instance - */ - Progress.create = function create(properties) { - return new Progress(properties); - }; - - /** - * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Progress.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) - writer.uint32(/* id 1, wireType 1 =*/9).double(message.atResponseStart); - if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) - writer.uint32(/* id 2, wireType 1 =*/17).double(message.atResponseEnd); - return writer; - }; - - /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Progress.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Progress message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Progress.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.atResponseStart = reader.double(); - break; - } - case 2: { - message.atResponseEnd = reader.double(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Progress message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Progress.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Progress message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Progress.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - if (typeof message.atResponseStart !== "number") - return "atResponseStart: number expected"; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - if (typeof message.atResponseEnd !== "number") - return "atResponseEnd: number expected"; - return null; - }; - - /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress - */ - Progress.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats.Progress) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); - if (object.atResponseStart != null) - message.atResponseStart = Number(object.atResponseStart); - if (object.atResponseEnd != null) - message.atResponseEnd = Number(object.atResponseEnd); - return message; - }; - - /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {google.cloud.bigquery.storage.v1.StreamStats.Progress} message Progress - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Progress.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.atResponseStart = 0; - object.atResponseEnd = 0; - } - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; - return object; - }; - - /** - * Converts this Progress to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @instance - * @returns {Object.} JSON object - */ - Progress.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Progress - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats.Progress"; - }; - - return Progress; - })(); - - return StreamStats; - })(); - - v1.ReadRowsResponse = (function() { - - /** - * Properties of a ReadRowsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadRowsResponse - * @property {google.cloud.bigquery.storage.v1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows - * @property {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch - * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount - * @property {google.cloud.bigquery.storage.v1.IStreamStats|null} [stats] ReadRowsResponse stats - * @property {google.cloud.bigquery.storage.v1.IThrottleState|null} [throttleState] ReadRowsResponse throttleState - * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema - * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema - */ - - /** - * Constructs a new ReadRowsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadRowsResponse. 
- * @implements IReadRowsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set - */ - function ReadRowsResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadRowsResponse avroRows. - * @member {google.cloud.bigquery.storage.v1.IAvroRows|null|undefined} avroRows - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.avroRows = null; - - /** - * ReadRowsResponse arrowRecordBatch. - * @member {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null|undefined} arrowRecordBatch - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.arrowRecordBatch = null; - - /** - * ReadRowsResponse rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * ReadRowsResponse stats. - * @member {google.cloud.bigquery.storage.v1.IStreamStats|null|undefined} stats - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.stats = null; - - /** - * ReadRowsResponse throttleState. - * @member {google.cloud.bigquery.storage.v1.IThrottleState|null|undefined} throttleState - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.throttleState = null; - - /** - * ReadRowsResponse avroSchema. - * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.avroSchema = null; - - /** - * ReadRowsResponse arrowSchema. 
- * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.arrowSchema = null; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * ReadRowsResponse rows. - * @member {"avroRows"|"arrowRecordBatch"|undefined} rows - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - Object.defineProperty(ReadRowsResponse.prototype, "rows", { - get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * ReadRowsResponse schema. - * @member {"avroSchema"|"arrowSchema"|undefined} schema - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ - Object.defineProperty(ReadRowsResponse.prototype, "schema", { - get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new ReadRowsResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse instance - */ - ReadRowsResponse.create = function create(properties) { - return new ReadRowsResponse(properties); - }; - - /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.stats != null && Object.hasOwnProperty.call(message, "stats")) - $root.google.cloud.bigquery.storage.v1.StreamStats.encode(message.stats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) - $root.google.cloud.bigquery.storage.v1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) - $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.throttleState != null && Object.hasOwnProperty.call(message, "throttleState")) - $root.google.cloud.bigquery.storage.v1.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); - if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) - $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) - $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the 
specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 3: { - message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); - break; - } - case 4: { - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); - break; - } - case 6: { - message.rowCount = reader.int64(); - break; - } - case 2: { - message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.decode(reader, reader.uint32()); - break; - } - case 5: { - message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); - break; - } - case 7: { - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); - break; - } - case 8: { - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadRowsResponse message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadRowsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AvroRows.verify(message.avroRows); - if (error) - return "avroRows." + error; - } - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - if (properties.rows === 1) - return "rows: multiple values"; - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify(message.arrowRecordBatch); - if (error) - return "arrowRecordBatch." + error; - } - } - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - if (message.stats != null && message.hasOwnProperty("stats")) { - var error = $root.google.cloud.bigquery.storage.v1.StreamStats.verify(message.stats); - if (error) - return "stats." + error; - } - if (message.throttleState != null && message.hasOwnProperty("throttleState")) { - var error = $root.google.cloud.bigquery.storage.v1.ThrottleState.verify(message.throttleState); - if (error) - return "throttleState." + error; - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); - if (error) - return "avroSchema." 
+ error; - } - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - if (properties.schema === 1) - return "schema: multiple values"; - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); - if (error) - return "arrowSchema." + error; - } - } - return null; - }; - - /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse - */ - ReadRowsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); - if (object.avroRows != null) { - if (typeof object.avroRows !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroRows: object expected"); - message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.fromObject(object.avroRows); - } - if (object.arrowRecordBatch != null) { - if (typeof object.arrowRecordBatch !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowRecordBatch: object expected"); - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); - } - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, 
object.rowCount.high >>> 0).toNumber(); - if (object.stats != null) { - if (typeof object.stats !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.stats: object expected"); - message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.fromObject(object.stats); - } - if (object.throttleState != null) { - if (typeof object.throttleState !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.throttleState: object expected"); - message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.fromObject(object.throttleState); - } - if (object.avroSchema != null) { - if (typeof object.avroSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroSchema: object expected"); - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); - } - if (object.arrowSchema != null) { - if (typeof object.arrowSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowSchema: object expected"); - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); - } - return message; - }; - - /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} message ReadRowsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadRowsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.stats = null; - object.throttleState = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; - } - if (message.stats != null && message.hasOwnProperty("stats")) - object.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.toObject(message.stats, options); - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - object.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.toObject(message.avroRows, options); - if (options.oneofs) - object.rows = "avroRows"; - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); - if (options.oneofs) - object.rows = "arrowRecordBatch"; - } - if (message.throttleState != null && message.hasOwnProperty("throttleState")) - object.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.toObject(message.throttleState, options); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); - if (options.oneofs) - object.schema = "avroSchema"; - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); - if (options.oneofs) - object.schema = "arrowSchema"; - } - return object; - }; - - /** - * Converts this ReadRowsResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - * @returns {Object.} JSON object - */ - ReadRowsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadRowsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsResponse"; - }; - - return ReadRowsResponse; - })(); - - v1.SplitReadStreamRequest = (function() { - - /** - * Properties of a SplitReadStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ISplitReadStreamRequest - * @property {string|null} [name] SplitReadStreamRequest name - * @property {number|null} [fraction] SplitReadStreamRequest fraction - */ - - /** - * Constructs a new SplitReadStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a SplitReadStreamRequest. - * @implements ISplitReadStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set - */ - function SplitReadStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SplitReadStreamRequest name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @instance - */ - SplitReadStreamRequest.prototype.name = ""; - - /** - * SplitReadStreamRequest fraction. 
- * @member {number} fraction - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @instance - */ - SplitReadStreamRequest.prototype.fraction = 0; - - /** - * Creates a new SplitReadStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest instance - */ - SplitReadStreamRequest.create = function create(properties) { - return new SplitReadStreamRequest(properties); - }; - - /** - * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) - writer.uint32(/* id 2, wireType 1 =*/17).double(message.fraction); - return writer; - }; - - /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.fraction = reader.double(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SplitReadStreamRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SplitReadStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.fraction != null && message.hasOwnProperty("fraction")) - if (typeof message.fraction !== "number") - return "fraction: number expected"; - return null; - }; - - /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest - */ - SplitReadStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); - if (object.name != null) - message.name = String(object.name); - if (object.fraction != null) - message.fraction = Number(object.fraction); - return message; - }; - - /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} message SplitReadStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SplitReadStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.fraction = 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.fraction != null && message.hasOwnProperty("fraction")) - object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; - return object; - }; - - /** - * Converts this SplitReadStreamRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @instance - * @returns {Object.} JSON object - */ - SplitReadStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SplitReadStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamRequest"; - }; - - return SplitReadStreamRequest; - })(); - - v1.SplitReadStreamResponse = (function() { - - /** - * Properties of a SplitReadStreamResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ISplitReadStreamResponse - * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [primaryStream] SplitReadStreamResponse primaryStream - * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [remainderStream] SplitReadStreamResponse remainderStream - */ - - /** - * Constructs a new SplitReadStreamResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a SplitReadStreamResponse. - * @implements ISplitReadStreamResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set - */ - function SplitReadStreamResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SplitReadStreamResponse primaryStream. 
- * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} primaryStream - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @instance - */ - SplitReadStreamResponse.prototype.primaryStream = null; - - /** - * SplitReadStreamResponse remainderStream. - * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} remainderStream - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @instance - */ - SplitReadStreamResponse.prototype.remainderStream = null; - - /** - * Creates a new SplitReadStreamResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse instance - */ - SplitReadStreamResponse.create = function create(properties) { - return new SplitReadStreamResponse(properties); - }; - - /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) - $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) - $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); - break; - } - case 2: { - message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SplitReadStreamResponse message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SplitReadStreamResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.primaryStream); - if (error) - return "primaryStream." + error; - } - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.remainderStream); - if (error) - return "remainderStream." + error; - } - return null; - }; - - /** - * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse - */ - SplitReadStreamResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); - if (object.primaryStream != null) { - if (typeof object.primaryStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.primaryStream: object expected"); - message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.primaryStream); - } - if (object.remainderStream != null) { - if (typeof object.remainderStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.remainderStream: object expected"); - message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.remainderStream); - } - return message; - }; - - /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} message SplitReadStreamResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SplitReadStreamResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.primaryStream = null; - object.remainderStream = null; - } - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) - object.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.primaryStream, options); - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) - object.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.remainderStream, options); - return object; - }; - - /** - * Converts this SplitReadStreamResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @instance - * @returns {Object.} JSON object - */ - SplitReadStreamResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SplitReadStreamResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamResponse"; - }; - - return SplitReadStreamResponse; - })(); - - v1.CreateWriteStreamRequest = (function() { - - /** - * Properties of a CreateWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ICreateWriteStreamRequest - * @property {string|null} [parent] CreateWriteStreamRequest parent - * @property {google.cloud.bigquery.storage.v1.IWriteStream|null} [writeStream] CreateWriteStreamRequest writeStream - */ - - /** - * Constructs a new CreateWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a CreateWriteStreamRequest. - * @implements ICreateWriteStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set - */ - function CreateWriteStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * CreateWriteStreamRequest parent. 
- * @member {string} parent - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @instance - */ - CreateWriteStreamRequest.prototype.parent = ""; - - /** - * CreateWriteStreamRequest writeStream. - * @member {google.cloud.bigquery.storage.v1.IWriteStream|null|undefined} writeStream - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @instance - */ - CreateWriteStreamRequest.prototype.writeStream = null; - - /** - * Creates a new CreateWriteStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest instance - */ - CreateWriteStreamRequest.create = function create(properties) { - return new CreateWriteStreamRequest(properties); - }; - - /** - * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateWriteStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) - $root.google.cloud.bigquery.storage.v1.WriteStream.encode(message.writeStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateWriteStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.parent = reader.string(); - break; - } - case 2: { - message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a CreateWriteStreamRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - CreateWriteStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.parent != null && message.hasOwnProperty("parent")) - if (!$util.isString(message.parent)) - return "parent: string expected"; - if (message.writeStream != null && message.hasOwnProperty("writeStream")) { - var error = $root.google.cloud.bigquery.storage.v1.WriteStream.verify(message.writeStream); - if (error) - return "writeStream." + error; - } - return null; - }; - - /** - * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest - */ - CreateWriteStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); - if (object.parent != null) - message.parent = String(object.parent); - if (object.writeStream != null) { - if (typeof object.writeStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.writeStream: object expected"); - message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.fromObject(object.writeStream); - } - return message; - }; - - /** - * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} message CreateWriteStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - CreateWriteStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.parent = ""; - object.writeStream = null; - } - if (message.parent != null && message.hasOwnProperty("parent")) - object.parent = message.parent; - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - object.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.toObject(message.writeStream, options); - return object; - }; - - /** - * Converts this CreateWriteStreamRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @instance - * @returns {Object.} JSON object - */ - CreateWriteStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for CreateWriteStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - CreateWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateWriteStreamRequest"; - }; - - return CreateWriteStreamRequest; - })(); - - v1.AppendRowsRequest = (function() { - - /** - * Properties of an AppendRowsRequest. 
- * @memberof google.cloud.bigquery.storage.v1 - * @interface IAppendRowsRequest - * @property {string|null} [writeStream] AppendRowsRequest writeStream - * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset - * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows - * @property {string|null} [traceId] AppendRowsRequest traceId - */ - - /** - * Constructs a new AppendRowsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an AppendRowsRequest. - * @implements IAppendRowsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set - */ - function AppendRowsRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AppendRowsRequest writeStream. - * @member {string} writeStream - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - */ - AppendRowsRequest.prototype.writeStream = ""; - - /** - * AppendRowsRequest offset. - * @member {google.protobuf.IInt64Value|null|undefined} offset - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - */ - AppendRowsRequest.prototype.offset = null; - - /** - * AppendRowsRequest protoRows. - * @member {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null|undefined} protoRows - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - */ - AppendRowsRequest.prototype.protoRows = null; - - /** - * AppendRowsRequest traceId. - * @member {string} traceId - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - */ - AppendRowsRequest.prototype.traceId = ""; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * AppendRowsRequest rows. 
- * @member {"protoRows"|undefined} rows - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - */ - Object.defineProperty(AppendRowsRequest.prototype, "rows", { - get: $util.oneOfGetter($oneOfFields = ["protoRows"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new AppendRowsRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest instance - */ - AppendRowsRequest.create = function create(properties) { - return new AppendRowsRequest(properties); - }; - - /** - * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendRowsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.protoRows != null && Object.hasOwnProperty.call(message, "protoRows")) - $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.traceId != null 
&& Object.hasOwnProperty.call(message, "traceId")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId); - return writer; - }; - - /** - * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AppendRowsRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendRowsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.writeStream = reader.string(); - break; - } - case 2: { - message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; - } - case 4: { - message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); - break; - } - case 6: { - message.traceId = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendRowsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AppendRowsRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AppendRowsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - if (!$util.isString(message.writeStream)) - return "writeStream: string expected"; - if (message.offset != null && message.hasOwnProperty("offset")) { - var error = $root.google.protobuf.Int64Value.verify(message.offset); - if (error) - return "offset." + error; - } - if (message.protoRows != null && message.hasOwnProperty("protoRows")) { - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify(message.protoRows); - if (error) - return "protoRows." + error; - } - } - if (message.traceId != null && message.hasOwnProperty("traceId")) - if (!$util.isString(message.traceId)) - return "traceId: string expected"; - return null; - }; - - /** - * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest - */ - AppendRowsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); - if (object.writeStream != null) - message.writeStream = String(object.writeStream); - if (object.offset != null) { - if (typeof object.offset !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.offset: object expected"); - message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); - } - if (object.protoRows != null) { - if (typeof object.protoRows !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.protoRows: object expected"); - message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.fromObject(object.protoRows); - } - if (object.traceId != null) - message.traceId = String(object.traceId); - return message; - }; - - /** - * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest} message AppendRowsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AppendRowsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.writeStream = ""; - object.offset = null; - object.traceId = ""; - } - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - object.writeStream = message.writeStream; - if (message.offset != null && message.hasOwnProperty("offset")) - object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); - if (message.protoRows != null && message.hasOwnProperty("protoRows")) { - object.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.toObject(message.protoRows, options); - if (options.oneofs) - object.rows = "protoRows"; - } - if (message.traceId != null && message.hasOwnProperty("traceId")) - object.traceId = message.traceId; - return object; - }; - - /** - * Converts this AppendRowsRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @instance - * @returns {Object.} JSON object - */ - AppendRowsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AppendRowsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AppendRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest"; - }; - - AppendRowsRequest.ProtoData = (function() { - - /** - * Properties of a ProtoData. - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @interface IProtoData - * @property {google.cloud.bigquery.storage.v1.IProtoSchema|null} [writerSchema] ProtoData writerSchema - * @property {google.cloud.bigquery.storage.v1.IProtoRows|null} [rows] ProtoData rows - */ - - /** - * Constructs a new ProtoData. - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest - * @classdesc Represents a ProtoData. - * @implements IProtoData - * @constructor - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set - */ - function ProtoData(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ProtoData writerSchema. - * @member {google.cloud.bigquery.storage.v1.IProtoSchema|null|undefined} writerSchema - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @instance - */ - ProtoData.prototype.writerSchema = null; - - /** - * ProtoData rows. 
- * @member {google.cloud.bigquery.storage.v1.IProtoRows|null|undefined} rows - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @instance - */ - ProtoData.prototype.rows = null; - - /** - * Creates a new ProtoData instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData instance - */ - ProtoData.create = function create(properties) { - return new ProtoData(properties); - }; - - /** - * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoData.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.writerSchema != null && Object.hasOwnProperty.call(message, "writerSchema")) - $root.google.cloud.bigquery.storage.v1.ProtoSchema.encode(message.writerSchema, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.rows != null && Object.hasOwnProperty.call(message, "rows")) - $root.google.cloud.bigquery.storage.v1.ProtoRows.encode(message.rows, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ProtoData.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ProtoData message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoData.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); - break; - } - case 2: { - message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ProtoData message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ProtoData.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ProtoData message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ProtoData.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) { - var error = $root.google.cloud.bigquery.storage.v1.ProtoSchema.verify(message.writerSchema); - if (error) - return "writerSchema." + error; - } - if (message.rows != null && message.hasOwnProperty("rows")) { - var error = $root.google.cloud.bigquery.storage.v1.ProtoRows.verify(message.rows); - if (error) - return "rows." + error; - } - return null; - }; - - /** - * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData - */ - ProtoData.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); - if (object.writerSchema != null) { - if (typeof object.writerSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.writerSchema: object expected"); - message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.fromObject(object.writerSchema); - } - if (object.rows != null) { - if (typeof object.rows !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.rows: object expected"); - message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.fromObject(object.rows); - } - return message; - }; - - /** - * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} message ProtoData - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ProtoData.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.writerSchema = null; - object.rows = null; - } - if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) - object.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.toObject(message.writerSchema, options); - if (message.rows != null && message.hasOwnProperty("rows")) - object.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.toObject(message.rows, options); - return object; - }; - - /** - * Converts this ProtoData to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @instance - * @returns {Object.} JSON object - */ - ProtoData.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ProtoData - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ProtoData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"; - }; - - return ProtoData; - })(); - - return AppendRowsRequest; - })(); - - v1.AppendRowsResponse = (function() { - - /** - * Properties of an AppendRowsResponse. 
- * @memberof google.cloud.bigquery.storage.v1 - * @interface IAppendRowsResponse - * @property {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null} [appendResult] AppendRowsResponse appendResult - * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error - * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema - * @property {Array.|null} [rowErrors] AppendRowsResponse rowErrors - * @property {string|null} [writeStream] AppendRowsResponse writeStream - */ - - /** - * Constructs a new AppendRowsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents an AppendRowsResponse. - * @implements IAppendRowsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set - */ - function AppendRowsResponse(properties) { - this.rowErrors = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AppendRowsResponse appendResult. - * @member {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null|undefined} appendResult - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - AppendRowsResponse.prototype.appendResult = null; - - /** - * AppendRowsResponse error. - * @member {google.rpc.IStatus|null|undefined} error - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - AppendRowsResponse.prototype.error = null; - - /** - * AppendRowsResponse updatedSchema. - * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} updatedSchema - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - AppendRowsResponse.prototype.updatedSchema = null; - - /** - * AppendRowsResponse rowErrors. 
- * @member {Array.} rowErrors - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - AppendRowsResponse.prototype.rowErrors = $util.emptyArray; - - /** - * AppendRowsResponse writeStream. - * @member {string} writeStream - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - AppendRowsResponse.prototype.writeStream = ""; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * AppendRowsResponse response. - * @member {"appendResult"|"error"|undefined} response - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - */ - Object.defineProperty(AppendRowsResponse.prototype, "response", { - get: $util.oneOfGetter($oneOfFields = ["appendResult", "error"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new AppendRowsResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse instance - */ - AppendRowsResponse.create = function create(properties) { - return new AppendRowsResponse(properties); - }; - - /** - * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendRowsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.appendResult != null && Object.hasOwnProperty.call(message, "appendResult")) - $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.encode(message.appendResult, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.error != null && Object.hasOwnProperty.call(message, "error")) - $root.google.rpc.Status.encode(message.error, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.updatedSchema != null && Object.hasOwnProperty.call(message, "updatedSchema")) - $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.updatedSchema, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.rowErrors != null && message.rowErrors.length) - for (var i = 0; i < message.rowErrors.length; ++i) - $root.google.cloud.bigquery.storage.v1.RowError.encode(message.rowErrors[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.writeStream); - return writer; - }; - - /** - * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AppendRowsResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendRowsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); - break; - } - case 2: { - message.error = $root.google.rpc.Status.decode(reader, reader.uint32()); - break; - } - case 3: { - message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); - break; - } - case 4: { - if (!(message.rowErrors && message.rowErrors.length)) - message.rowErrors = []; - message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); - break; - } - case 5: { - message.writeStream = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendRowsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AppendRowsResponse message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AppendRowsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.appendResult != null && message.hasOwnProperty("appendResult")) { - properties.response = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify(message.appendResult); - if (error) - return "appendResult." + error; - } - } - if (message.error != null && message.hasOwnProperty("error")) { - if (properties.response === 1) - return "response: multiple values"; - properties.response = 1; - { - var error = $root.google.rpc.Status.verify(message.error); - if (error) - return "error." + error; - } - } - if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) { - var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.updatedSchema); - if (error) - return "updatedSchema." + error; - } - if (message.rowErrors != null && message.hasOwnProperty("rowErrors")) { - if (!Array.isArray(message.rowErrors)) - return "rowErrors: array expected"; - for (var i = 0; i < message.rowErrors.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.RowError.verify(message.rowErrors[i]); - if (error) - return "rowErrors." + error; - } - } - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - if (!$util.isString(message.writeStream)) - return "writeStream: string expected"; - return null; - }; - - /** - * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse - */ - AppendRowsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); - if (object.appendResult != null) { - if (typeof object.appendResult !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.appendResult: object expected"); - message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.fromObject(object.appendResult); - } - if (object.error != null) { - if (typeof object.error !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.error: object expected"); - message.error = $root.google.rpc.Status.fromObject(object.error); - } - if (object.updatedSchema != null) { - if (typeof object.updatedSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.updatedSchema: object expected"); - message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.updatedSchema); - } - if (object.rowErrors) { - if (!Array.isArray(object.rowErrors)) - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: array expected"); - message.rowErrors = []; - for (var i = 0; i < object.rowErrors.length; ++i) { - if (typeof object.rowErrors[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: object expected"); - message.rowErrors[i] = $root.google.cloud.bigquery.storage.v1.RowError.fromObject(object.rowErrors[i]); - } - } - if (object.writeStream != null) - message.writeStream = String(object.writeStream); - return message; - }; - - /** - * Creates a plain 
object from an AppendRowsResponse message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} message AppendRowsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AppendRowsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.rowErrors = []; - if (options.defaults) { - object.updatedSchema = null; - object.writeStream = ""; - } - if (message.appendResult != null && message.hasOwnProperty("appendResult")) { - object.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.toObject(message.appendResult, options); - if (options.oneofs) - object.response = "appendResult"; - } - if (message.error != null && message.hasOwnProperty("error")) { - object.error = $root.google.rpc.Status.toObject(message.error, options); - if (options.oneofs) - object.response = "error"; - } - if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) - object.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.updatedSchema, options); - if (message.rowErrors && message.rowErrors.length) { - object.rowErrors = []; - for (var j = 0; j < message.rowErrors.length; ++j) - object.rowErrors[j] = $root.google.cloud.bigquery.storage.v1.RowError.toObject(message.rowErrors[j], options); - } - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - object.writeStream = message.writeStream; - return object; - }; - - /** - * Converts this AppendRowsResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @instance - * @returns {Object.} JSON object - */ - AppendRowsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AppendRowsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AppendRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse"; - }; - - AppendRowsResponse.AppendResult = (function() { - - /** - * Properties of an AppendResult. - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @interface IAppendResult - * @property {google.protobuf.IInt64Value|null} [offset] AppendResult offset - */ - - /** - * Constructs a new AppendResult. - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse - * @classdesc Represents an AppendResult. - * @implements IAppendResult - * @constructor - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set - */ - function AppendResult(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AppendResult offset. - * @member {google.protobuf.IInt64Value|null|undefined} offset - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @instance - */ - AppendResult.prototype.offset = null; - - /** - * Creates a new AppendResult instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult instance - */ - AppendResult.create = function create(properties) { - return new AppendResult(properties); - }; - - /** - * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendResult.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AppendResult.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AppendResult message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendResult.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AppendResult message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AppendResult.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AppendResult message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AppendResult.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.offset != null && message.hasOwnProperty("offset")) { - var error = $root.google.protobuf.Int64Value.verify(message.offset); - if (error) - return "offset." + error; - } - return null; - }; - - /** - * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult - */ - AppendResult.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); - if (object.offset != null) { - if (typeof object.offset !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.offset: object expected"); - message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); - } - return message; - }; - - /** - * Creates a plain object from an AppendResult message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} message AppendResult - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AppendResult.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.offset = null; - if (message.offset != null && message.hasOwnProperty("offset")) - object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); - return object; - }; - - /** - * Converts this AppendResult to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @instance - * @returns {Object.} JSON object - */ - AppendResult.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AppendResult - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AppendResult.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"; - }; - - return AppendResult; - })(); - - return AppendRowsResponse; - })(); - - v1.GetWriteStreamRequest = (function() { - - /** - * Properties of a GetWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IGetWriteStreamRequest - * @property {string|null} [name] GetWriteStreamRequest name - * @property {google.cloud.bigquery.storage.v1.WriteStreamView|null} [view] GetWriteStreamRequest view - */ - - /** - * Constructs a new GetWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a GetWriteStreamRequest. - * @implements IGetWriteStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set - */ - function GetWriteStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * GetWriteStreamRequest name. 
- * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @instance - */ - GetWriteStreamRequest.prototype.name = ""; - - /** - * GetWriteStreamRequest view. - * @member {google.cloud.bigquery.storage.v1.WriteStreamView} view - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @instance - */ - GetWriteStreamRequest.prototype.view = 0; - - /** - * Creates a new GetWriteStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest instance - */ - GetWriteStreamRequest.create = function create(properties) { - return new GetWriteStreamRequest(properties); - }; - - /** - * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - GetWriteStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.view != null && Object.hasOwnProperty.call(message, "view")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.view); - return writer; - }; - - /** - * Encodes the specified GetWriteStreamRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - GetWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a GetWriteStreamRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - GetWriteStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 3: { - message.view = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - GetWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a GetWriteStreamRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - GetWriteStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.view != null && message.hasOwnProperty("view")) - switch (message.view) { - default: - return "view: enum value expected"; - case 0: - case 1: - case 2: - break; - } - return null; - }; - - /** - * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest - */ - GetWriteStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); - if (object.name != null) - message.name = String(object.name); - switch (object.view) { - default: - if (typeof object.view === "number") { - message.view = object.view; - break; - } - break; - case "WRITE_STREAM_VIEW_UNSPECIFIED": - case 0: - message.view = 0; - break; - case "BASIC": - case 1: - message.view = 1; - break; - case "FULL": - case 2: - message.view = 2; - break; - } - return message; - }; - - /** - * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} message GetWriteStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - GetWriteStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.view = options.enums === String ? "WRITE_STREAM_VIEW_UNSPECIFIED" : 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.view != null && message.hasOwnProperty("view")) - object.view = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] === undefined ? 
message.view : $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] : message.view; - return object; - }; - - /** - * Converts this GetWriteStreamRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @instance - * @returns {Object.} JSON object - */ - GetWriteStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for GetWriteStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - GetWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.GetWriteStreamRequest"; - }; - - return GetWriteStreamRequest; - })(); - - v1.BatchCommitWriteStreamsRequest = (function() { - - /** - * Properties of a BatchCommitWriteStreamsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IBatchCommitWriteStreamsRequest - * @property {string|null} [parent] BatchCommitWriteStreamsRequest parent - * @property {Array.|null} [writeStreams] BatchCommitWriteStreamsRequest writeStreams - */ - - /** - * Constructs a new BatchCommitWriteStreamsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a BatchCommitWriteStreamsRequest. 
- * @implements IBatchCommitWriteStreamsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set - */ - function BatchCommitWriteStreamsRequest(properties) { - this.writeStreams = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BatchCommitWriteStreamsRequest parent. - * @member {string} parent - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @instance - */ - BatchCommitWriteStreamsRequest.prototype.parent = ""; - - /** - * BatchCommitWriteStreamsRequest writeStreams. - * @member {Array.} writeStreams - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @instance - */ - BatchCommitWriteStreamsRequest.prototype.writeStreams = $util.emptyArray; - - /** - * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest instance - */ - BatchCommitWriteStreamsRequest.create = function create(properties) { - return new BatchCommitWriteStreamsRequest(properties); - }; - - /** - * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCommitWriteStreamsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.writeStreams != null && message.writeStreams.length) - for (var i = 0; i < message.writeStreams.length; ++i) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.writeStreams[i]); - return writer; - }; - - /** - * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCommitWriteStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCommitWriteStreamsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.parent = reader.string(); - break; - } - case 2: { - if (!(message.writeStreams && message.writeStreams.length)) - message.writeStreams = []; - message.writeStreams.push(reader.string()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCommitWriteStreamsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BatchCommitWriteStreamsRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BatchCommitWriteStreamsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.parent != null && message.hasOwnProperty("parent")) - if (!$util.isString(message.parent)) - return "parent: string expected"; - if (message.writeStreams != null && message.hasOwnProperty("writeStreams")) { - if (!Array.isArray(message.writeStreams)) - return "writeStreams: array expected"; - for (var i = 0; i < message.writeStreams.length; ++i) - if (!$util.isString(message.writeStreams[i])) - return "writeStreams: string[] expected"; - } - return null; - }; - - /** - * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest - */ - BatchCommitWriteStreamsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); - if (object.parent != null) - message.parent = String(object.parent); - if (object.writeStreams) { - if (!Array.isArray(object.writeStreams)) - throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.writeStreams: array expected"); - message.writeStreams = []; - for (var i = 0; i < object.writeStreams.length; ++i) - message.writeStreams[i] = String(object.writeStreams[i]); - } - return message; - }; - - /** - * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BatchCommitWriteStreamsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.writeStreams = []; - if (options.defaults) - object.parent = ""; - if (message.parent != null && message.hasOwnProperty("parent")) - object.parent = message.parent; - if (message.writeStreams && message.writeStreams.length) { - object.writeStreams = []; - for (var j = 0; j < message.writeStreams.length; ++j) - object.writeStreams[j] = message.writeStreams[j]; - } - return object; - }; - - /** - * Converts this BatchCommitWriteStreamsRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @instance - * @returns {Object.} JSON object - */ - BatchCommitWriteStreamsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BatchCommitWriteStreamsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BatchCommitWriteStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest"; - }; - - return BatchCommitWriteStreamsRequest; - })(); - - v1.BatchCommitWriteStreamsResponse = (function() { - - /** - * 
Properties of a BatchCommitWriteStreamsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IBatchCommitWriteStreamsResponse - * @property {google.protobuf.ITimestamp|null} [commitTime] BatchCommitWriteStreamsResponse commitTime - * @property {Array.|null} [streamErrors] BatchCommitWriteStreamsResponse streamErrors - */ - - /** - * Constructs a new BatchCommitWriteStreamsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a BatchCommitWriteStreamsResponse. - * @implements IBatchCommitWriteStreamsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set - */ - function BatchCommitWriteStreamsResponse(properties) { - this.streamErrors = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BatchCommitWriteStreamsResponse commitTime. - * @member {google.protobuf.ITimestamp|null|undefined} commitTime - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @instance - */ - BatchCommitWriteStreamsResponse.prototype.commitTime = null; - - /** - * BatchCommitWriteStreamsResponse streamErrors. - * @member {Array.} streamErrors - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @instance - */ - BatchCommitWriteStreamsResponse.prototype.streamErrors = $util.emptyArray; - - /** - * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse instance - */ - BatchCommitWriteStreamsResponse.create = function create(properties) { - return new BatchCommitWriteStreamsResponse(properties); - }; - - /** - * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCommitWriteStreamsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) - $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.streamErrors != null && message.streamErrors.length) - for (var i = 0; i < message.streamErrors.length; ++i) - $root.google.cloud.bigquery.storage.v1.StorageError.encode(message.streamErrors[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCommitWriteStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCommitWriteStreamsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - case 2: { - if (!(message.streamErrors && message.streamErrors.length)) - message.streamErrors = []; - message.streamErrors.push($root.google.cloud.bigquery.storage.v1.StorageError.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCommitWriteStreamsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BatchCommitWriteStreamsResponse message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BatchCommitWriteStreamsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.commitTime != null && message.hasOwnProperty("commitTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.commitTime); - if (error) - return "commitTime." + error; - } - if (message.streamErrors != null && message.hasOwnProperty("streamErrors")) { - if (!Array.isArray(message.streamErrors)) - return "streamErrors: array expected"; - for (var i = 0; i < message.streamErrors.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.StorageError.verify(message.streamErrors[i]); - if (error) - return "streamErrors." + error; - } - } - return null; - }; - - /** - * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse - */ - BatchCommitWriteStreamsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); - if (object.commitTime != null) { - if (typeof object.commitTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.commitTime: object expected"); - message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime); - } - if (object.streamErrors) { - if (!Array.isArray(object.streamErrors)) - throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: array expected"); - message.streamErrors = []; - for (var i = 0; i < object.streamErrors.length; ++i) { - if (typeof object.streamErrors[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: object expected"); - message.streamErrors[i] = $root.google.cloud.bigquery.storage.v1.StorageError.fromObject(object.streamErrors[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BatchCommitWriteStreamsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streamErrors = []; - if (options.defaults) - object.commitTime = null; - if (message.commitTime != null && message.hasOwnProperty("commitTime")) - object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); - if (message.streamErrors && message.streamErrors.length) { - object.streamErrors = []; - for (var j = 0; j < message.streamErrors.length; ++j) - object.streamErrors[j] = $root.google.cloud.bigquery.storage.v1.StorageError.toObject(message.streamErrors[j], options); - } - return object; - }; - - /** - * Converts this BatchCommitWriteStreamsResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @instance - * @returns {Object.} JSON object - */ - BatchCommitWriteStreamsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BatchCommitWriteStreamsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BatchCommitWriteStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse"; - }; - - return BatchCommitWriteStreamsResponse; - })(); - - v1.FinalizeWriteStreamRequest = (function() { - - /** - * Properties of a FinalizeWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IFinalizeWriteStreamRequest - * @property {string|null} [name] FinalizeWriteStreamRequest name - */ - - /** - * Constructs a new FinalizeWriteStreamRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a FinalizeWriteStreamRequest. - * @implements IFinalizeWriteStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set - */ - function FinalizeWriteStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FinalizeWriteStreamRequest name. 
- * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @instance - */ - FinalizeWriteStreamRequest.prototype.name = ""; - - /** - * Creates a new FinalizeWriteStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest instance - */ - FinalizeWriteStreamRequest.create = function create(properties) { - return new FinalizeWriteStreamRequest(properties); - }; - - /** - * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeWriteStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - return writer; - }; - - /** - * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeWriteStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FinalizeWriteStreamRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FinalizeWriteStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - return null; - }; - - /** - * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest - */ - FinalizeWriteStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); - if (object.name != null) - message.name = String(object.name); - return message; - }; - - /** - * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} message FinalizeWriteStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FinalizeWriteStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.name = ""; - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - return object; - }; - - /** - * Converts this FinalizeWriteStreamRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @instance - * @returns {Object.} JSON object - */ - FinalizeWriteStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FinalizeWriteStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FinalizeWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest"; - }; - - return FinalizeWriteStreamRequest; - })(); - - v1.FinalizeWriteStreamResponse = (function() { - - /** - * Properties of a FinalizeWriteStreamResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IFinalizeWriteStreamResponse - * @property {number|Long|null} [rowCount] FinalizeWriteStreamResponse rowCount - */ - - /** - * Constructs a new FinalizeWriteStreamResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a FinalizeWriteStreamResponse. - * @implements IFinalizeWriteStreamResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set - */ - function FinalizeWriteStreamResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FinalizeWriteStreamResponse rowCount. 
- * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @instance - */ - FinalizeWriteStreamResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new FinalizeWriteStreamResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse instance - */ - FinalizeWriteStreamResponse.create = function create(properties) { - return new FinalizeWriteStreamResponse(properties); - }; - - /** - * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeWriteStreamResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeWriteStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeWriteStreamResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.rowCount = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeWriteStreamResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FinalizeWriteStreamResponse message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FinalizeWriteStreamResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - return null; - }; - - /** - * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse - */ - FinalizeWriteStreamResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} message FinalizeWriteStreamResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FinalizeWriteStreamResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? 
"0" : 0; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this FinalizeWriteStreamResponse to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @instance - * @returns {Object.} JSON object - */ - FinalizeWriteStreamResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FinalizeWriteStreamResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FinalizeWriteStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse"; - }; - - return FinalizeWriteStreamResponse; - })(); - - v1.FlushRowsRequest = (function() { - - /** - * Properties of a FlushRowsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IFlushRowsRequest - * @property {string|null} [writeStream] FlushRowsRequest writeStream - * @property {google.protobuf.IInt64Value|null} [offset] FlushRowsRequest offset - */ - - /** - * Constructs a new FlushRowsRequest. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a FlushRowsRequest. 
- * @implements IFlushRowsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set - */ - function FlushRowsRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FlushRowsRequest writeStream. - * @member {string} writeStream - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @instance - */ - FlushRowsRequest.prototype.writeStream = ""; - - /** - * FlushRowsRequest offset. - * @member {google.protobuf.IInt64Value|null|undefined} offset - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @instance - */ - FlushRowsRequest.prototype.offset = null; - - /** - * Creates a new FlushRowsRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest instance - */ - FlushRowsRequest.create = function create(properties) { - return new FlushRowsRequest(properties); - }; - - /** - * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FlushRowsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FlushRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FlushRowsRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FlushRowsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.writeStream = reader.string(); - break; - } - case 2: { - message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FlushRowsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FlushRowsRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FlushRowsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - if (!$util.isString(message.writeStream)) - return "writeStream: string expected"; - if (message.offset != null && message.hasOwnProperty("offset")) { - var error = $root.google.protobuf.Int64Value.verify(message.offset); - if (error) - return "offset." + error; - } - return null; - }; - - /** - * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest - */ - FlushRowsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); - if (object.writeStream != null) - message.writeStream = String(object.writeStream); - if (object.offset != null) { - if (typeof object.offset !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.FlushRowsRequest.offset: object expected"); - message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); - } - return message; - }; - - /** - * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1.FlushRowsRequest} message FlushRowsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FlushRowsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.writeStream = ""; - object.offset = null; - } - if (message.writeStream != null && message.hasOwnProperty("writeStream")) - object.writeStream = message.writeStream; - if (message.offset != null && message.hasOwnProperty("offset")) - object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); - return object; - }; - - /** - * Converts this FlushRowsRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @instance - * @returns {Object.} JSON object - */ - FlushRowsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FlushRowsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FlushRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsRequest"; - }; - - return FlushRowsRequest; - })(); - - v1.FlushRowsResponse = (function() { - - /** - * Properties of a FlushRowsResponse. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IFlushRowsResponse - * @property {number|Long|null} [offset] FlushRowsResponse offset - */ - - /** - * Constructs a new FlushRowsResponse. 
- * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a FlushRowsResponse. - * @implements IFlushRowsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set - */ - function FlushRowsResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FlushRowsResponse offset. - * @member {number|Long} offset - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @instance - */ - FlushRowsResponse.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new FlushRowsResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse instance - */ - FlushRowsResponse.create = function create(properties) { - return new FlushRowsResponse(properties); - }; - - /** - * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FlushRowsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.offset); - return writer; - }; - - /** - * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FlushRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FlushRowsResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FlushRowsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.offset = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FlushRowsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FlushRowsResponse message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FlushRowsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.offset != null && message.hasOwnProperty("offset")) - if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) - return "offset: integer|Long expected"; - return null; - }; - - /** - * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse - */ - FlushRowsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); - if (object.offset != null) - if ($util.Long) - (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; - else if (typeof object.offset === "string") - message.offset = parseInt(object.offset, 10); - else if (typeof object.offset === "number") - message.offset = object.offset; - else if (typeof object.offset === "object") - message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} message FlushRowsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FlushRowsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.offset = options.longs === String ? "0" : 0; - if (message.offset != null && message.hasOwnProperty("offset")) - if (typeof message.offset === "number") - object.offset = options.longs === String ? String(message.offset) : message.offset; - else - object.offset = options.longs === String ? 
$util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; - return object; - }; - - /** - * Converts this FlushRowsResponse to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @instance - * @returns {Object.} JSON object - */ - FlushRowsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FlushRowsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FlushRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsResponse"; - }; - - return FlushRowsResponse; - })(); - - v1.StorageError = (function() { - - /** - * Properties of a StorageError. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IStorageError - * @property {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null} [code] StorageError code - * @property {string|null} [entity] StorageError entity - * @property {string|null} [errorMessage] StorageError errorMessage - */ - - /** - * Constructs a new StorageError. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a StorageError. 
- * @implements IStorageError - * @constructor - * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set - */ - function StorageError(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * StorageError code. - * @member {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode} code - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @instance - */ - StorageError.prototype.code = 0; - - /** - * StorageError entity. - * @member {string} entity - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @instance - */ - StorageError.prototype.entity = ""; - - /** - * StorageError errorMessage. - * @member {string} errorMessage - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @instance - */ - StorageError.prototype.errorMessage = ""; - - /** - * Creates a new StorageError instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError instance - */ - StorageError.create = function create(properties) { - return new StorageError(properties); - }; - - /** - * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StorageError.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.code != null && Object.hasOwnProperty.call(message, "code")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); - if (message.entity != null && Object.hasOwnProperty.call(message, "entity")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.entity); - if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.errorMessage); - return writer; - }; - - /** - * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StorageError.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a StorageError message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StorageError.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StorageError(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.code = reader.int32(); - break; - } - case 2: { - message.entity = reader.string(); - break; - } - case 3: { - message.errorMessage = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a StorageError message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StorageError.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a StorageError message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - StorageError.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.code != null && message.hasOwnProperty("code")) - switch (message.code) { - default: - return "code: enum value expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - break; - } - if (message.entity != null && message.hasOwnProperty("entity")) - if (!$util.isString(message.entity)) - return "entity: string expected"; - if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) - if (!$util.isString(message.errorMessage)) - return "errorMessage: string expected"; - return null; - }; - - /** - * Creates a StorageError message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError - */ - StorageError.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.StorageError) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.StorageError(); - switch (object.code) { - default: - if (typeof object.code === "number") { - message.code = object.code; - break; - } - break; - case "STORAGE_ERROR_CODE_UNSPECIFIED": - case 0: - message.code = 0; - break; - case "TABLE_NOT_FOUND": - case 1: - message.code = 1; - break; - case "STREAM_ALREADY_COMMITTED": - case 2: - message.code = 2; - break; - case "STREAM_NOT_FOUND": - case 3: - message.code = 3; - break; - case "INVALID_STREAM_TYPE": - case 4: - message.code = 4; - break; - case "INVALID_STREAM_STATE": - case 5: - message.code = 5; - break; - case "STREAM_FINALIZED": - case 6: - message.code = 6; - break; - case "SCHEMA_MISMATCH_EXTRA_FIELDS": - case 7: - message.code = 7; - break; - case "OFFSET_ALREADY_EXISTS": - case 8: - message.code = 8; - break; - case "OFFSET_OUT_OF_RANGE": - case 9: - message.code = 9; - break; - } - if (object.entity != null) - message.entity = String(object.entity); - if (object.errorMessage != null) - message.errorMessage = String(object.errorMessage); - return message; - }; - - /** - * Creates a plain object from a StorageError message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {google.cloud.bigquery.storage.v1.StorageError} message StorageError - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - StorageError.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.code = options.enums === String ? "STORAGE_ERROR_CODE_UNSPECIFIED" : 0; - object.entity = ""; - object.errorMessage = ""; - } - if (message.code != null && message.hasOwnProperty("code")) - object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] === undefined ? message.code : $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] : message.code; - if (message.entity != null && message.hasOwnProperty("entity")) - object.entity = message.entity; - if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) - object.errorMessage = message.errorMessage; - return object; - }; - - /** - * Converts this StorageError to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @instance - * @returns {Object.} JSON object - */ - StorageError.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for StorageError - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.StorageError - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - StorageError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StorageError"; - }; - - /** - * StorageErrorCode enum. 
- * @name google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode - * @enum {number} - * @property {number} STORAGE_ERROR_CODE_UNSPECIFIED=0 STORAGE_ERROR_CODE_UNSPECIFIED value - * @property {number} TABLE_NOT_FOUND=1 TABLE_NOT_FOUND value - * @property {number} STREAM_ALREADY_COMMITTED=2 STREAM_ALREADY_COMMITTED value - * @property {number} STREAM_NOT_FOUND=3 STREAM_NOT_FOUND value - * @property {number} INVALID_STREAM_TYPE=4 INVALID_STREAM_TYPE value - * @property {number} INVALID_STREAM_STATE=5 INVALID_STREAM_STATE value - * @property {number} STREAM_FINALIZED=6 STREAM_FINALIZED value - * @property {number} SCHEMA_MISMATCH_EXTRA_FIELDS=7 SCHEMA_MISMATCH_EXTRA_FIELDS value - * @property {number} OFFSET_ALREADY_EXISTS=8 OFFSET_ALREADY_EXISTS value - * @property {number} OFFSET_OUT_OF_RANGE=9 OFFSET_OUT_OF_RANGE value - */ - StorageError.StorageErrorCode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STORAGE_ERROR_CODE_UNSPECIFIED"] = 0; - values[valuesById[1] = "TABLE_NOT_FOUND"] = 1; - values[valuesById[2] = "STREAM_ALREADY_COMMITTED"] = 2; - values[valuesById[3] = "STREAM_NOT_FOUND"] = 3; - values[valuesById[4] = "INVALID_STREAM_TYPE"] = 4; - values[valuesById[5] = "INVALID_STREAM_STATE"] = 5; - values[valuesById[6] = "STREAM_FINALIZED"] = 6; - values[valuesById[7] = "SCHEMA_MISMATCH_EXTRA_FIELDS"] = 7; - values[valuesById[8] = "OFFSET_ALREADY_EXISTS"] = 8; - values[valuesById[9] = "OFFSET_OUT_OF_RANGE"] = 9; - return values; - })(); - - return StorageError; - })(); - - v1.RowError = (function() { - - /** - * Properties of a RowError. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IRowError - * @property {number|Long|null} [index] RowError index - * @property {google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null} [code] RowError code - * @property {string|null} [message] RowError message - */ - - /** - * Constructs a new RowError. 
- * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a RowError. - * @implements IRowError - * @constructor - * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set - */ - function RowError(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * RowError index. - * @member {number|Long} index - * @memberof google.cloud.bigquery.storage.v1.RowError - * @instance - */ - RowError.prototype.index = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * RowError code. - * @member {google.cloud.bigquery.storage.v1.RowError.RowErrorCode} code - * @memberof google.cloud.bigquery.storage.v1.RowError - * @instance - */ - RowError.prototype.code = 0; - - /** - * RowError message. - * @member {string} message - * @memberof google.cloud.bigquery.storage.v1.RowError - * @instance - */ - RowError.prototype.message = ""; - - /** - * Creates a new RowError instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.RowError} RowError instance - */ - RowError.create = function create(properties) { - return new RowError(properties); - }; - - /** - * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - RowError.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.index != null && Object.hasOwnProperty.call(message, "index")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.index); - if (message.code != null && Object.hasOwnProperty.call(message, "code")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.code); - if (message.message != null && Object.hasOwnProperty.call(message, "message")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.message); - return writer; - }; - - /** - * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - RowError.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a RowError message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.RowError} RowError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - RowError.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.RowError(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.index = reader.int64(); - break; - } - case 2: { - message.code = reader.int32(); - break; - } - case 3: { - message.message = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a RowError message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.RowError} RowError - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - RowError.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a RowError message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - RowError.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.index != null && message.hasOwnProperty("index")) - if (!$util.isInteger(message.index) && !(message.index && $util.isInteger(message.index.low) && $util.isInteger(message.index.high))) - return "index: integer|Long expected"; - if (message.code != null && message.hasOwnProperty("code")) - switch (message.code) { - default: - return "code: enum value expected"; - case 0: - case 1: - break; - } - if (message.message != null && message.hasOwnProperty("message")) - if (!$util.isString(message.message)) - return "message: string expected"; - return null; - }; - - /** - * Creates a RowError message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.RowError} RowError - */ - RowError.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.RowError) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.RowError(); - if (object.index != null) - if ($util.Long) - (message.index = $util.Long.fromValue(object.index)).unsigned = false; - else if (typeof object.index === "string") - message.index = parseInt(object.index, 10); - else if (typeof object.index === "number") - message.index = object.index; - else if (typeof object.index === "object") - message.index = new $util.LongBits(object.index.low >>> 0, object.index.high >>> 0).toNumber(); - switch (object.code) { - default: - if (typeof object.code === "number") { - message.code = object.code; - break; - } - break; - case "ROW_ERROR_CODE_UNSPECIFIED": - case 0: - message.code = 0; - break; - case "FIELDS_ERROR": - case 1: - message.code = 1; - break; - } - if (object.message != null) - message.message = String(object.message); - return message; - }; - - /** - * Creates a plain object from a RowError message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {google.cloud.bigquery.storage.v1.RowError} message RowError - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - RowError.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.index = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.index = options.longs === String ? 
"0" : 0; - object.code = options.enums === String ? "ROW_ERROR_CODE_UNSPECIFIED" : 0; - object.message = ""; - } - if (message.index != null && message.hasOwnProperty("index")) - if (typeof message.index === "number") - object.index = options.longs === String ? String(message.index) : message.index; - else - object.index = options.longs === String ? $util.Long.prototype.toString.call(message.index) : options.longs === Number ? new $util.LongBits(message.index.low >>> 0, message.index.high >>> 0).toNumber() : message.index; - if (message.code != null && message.hasOwnProperty("code")) - object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] === undefined ? message.code : $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] : message.code; - if (message.message != null && message.hasOwnProperty("message")) - object.message = message.message; - return object; - }; - - /** - * Converts this RowError to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.RowError - * @instance - * @returns {Object.} JSON object - */ - RowError.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for RowError - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.RowError - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - RowError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.RowError"; - }; - - /** - * RowErrorCode enum. 
- * @name google.cloud.bigquery.storage.v1.RowError.RowErrorCode - * @enum {number} - * @property {number} ROW_ERROR_CODE_UNSPECIFIED=0 ROW_ERROR_CODE_UNSPECIFIED value - * @property {number} FIELDS_ERROR=1 FIELDS_ERROR value - */ - RowError.RowErrorCode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "ROW_ERROR_CODE_UNSPECIFIED"] = 0; - values[valuesById[1] = "FIELDS_ERROR"] = 1; - return values; - })(); - - return RowError; - })(); - - /** - * DataFormat enum. - * @name google.cloud.bigquery.storage.v1.DataFormat - * @enum {number} - * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value - * @property {number} AVRO=1 AVRO value - * @property {number} ARROW=2 ARROW value - */ - v1.DataFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; - values[valuesById[1] = "AVRO"] = 1; - values[valuesById[2] = "ARROW"] = 2; - return values; - })(); - - v1.ReadSession = (function() { - - /** - * Properties of a ReadSession. 
- * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadSession - * @property {string|null} [name] ReadSession name - * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime - * @property {google.cloud.bigquery.storage.v1.DataFormat|null} [dataFormat] ReadSession dataFormat - * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadSession avroSchema - * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema - * @property {string|null} [table] ReadSession table - * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers - * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions - * @property {Array.|null} [streams] ReadSession streams - * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned - * @property {string|null} [traceId] ReadSession traceId - */ - - /** - * Constructs a new ReadSession. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadSession. - * @implements IReadSession - * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set - */ - function ReadSession(properties) { - this.streams = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadSession name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.name = ""; - - /** - * ReadSession expireTime. - * @member {google.protobuf.ITimestamp|null|undefined} expireTime - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.expireTime = null; - - /** - * ReadSession dataFormat. 
- * @member {google.cloud.bigquery.storage.v1.DataFormat} dataFormat - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.dataFormat = 0; - - /** - * ReadSession avroSchema. - * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.avroSchema = null; - - /** - * ReadSession arrowSchema. - * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.arrowSchema = null; - - /** - * ReadSession table. - * @member {string} table - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.table = ""; - - /** - * ReadSession tableModifiers. - * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.tableModifiers = null; - - /** - * ReadSession readOptions. - * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null|undefined} readOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.readOptions = null; - - /** - * ReadSession streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.streams = $util.emptyArray; - - /** - * ReadSession estimatedTotalBytesScanned. - * @member {number|Long} estimatedTotalBytesScanned - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * ReadSession traceId. 
- * @member {string} traceId - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - ReadSession.prototype.traceId = ""; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * ReadSession schema. - * @member {"avroSchema"|"arrowSchema"|undefined} schema - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - */ - Object.defineProperty(ReadSession.prototype, "schema", { - get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new ReadSession instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession instance - */ - ReadSession.create = function create(properties) { - return new ReadSession(properties); - }; - - /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) - $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); - if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) - $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) - $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.table != null && Object.hasOwnProperty.call(message, "table")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) - $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 
=*/66).fork()).ldelim(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); - if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) - writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); - if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) - writer.uint32(/* id 13, wireType 2 =*/106).string(message.traceId); - return writer; - }; - - /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadSession message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - case 3: { - message.dataFormat = reader.int32(); - break; - } - case 4: { - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); - break; - } - case 5: { - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); - break; - } - case 6: { - message.table = reader.string(); - break; - } - case 7: { - message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); - break; - } - case 8: { - message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); - break; - } - case 10: { - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); - break; - } - case 12: { - message.estimatedTotalBytesScanned = reader.int64(); - break; - } - case 13: { - message.traceId = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadSession message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadSession.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.expireTime); - if (error) - return "expireTime." + error; - } - if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) - switch (message.dataFormat) { - default: - return "dataFormat: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); - if (error) - return "avroSchema." 
+ error; - } - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - if (properties.schema === 1) - return "schema: multiple values"; - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); - if (error) - return "arrowSchema." + error; - } - } - if (message.table != null && message.hasOwnProperty("table")) - if (!$util.isString(message.table)) - return "table: string expected"; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify(message.tableModifiers); - if (error) - return "tableModifiers." + error; - } - if (message.readOptions != null && message.hasOwnProperty("readOptions")) { - var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify(message.readOptions); - if (error) - return "readOptions." + error; - } - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.streams[i]); - if (error) - return "streams." + error; - } - } - if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) - if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) - return "estimatedTotalBytesScanned: integer|Long expected"; - if (message.traceId != null && message.hasOwnProperty("traceId")) - if (!$util.isString(message.traceId)) - return "traceId: string expected"; - return null; - }; - - /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession - */ - ReadSession.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); - if (object.name != null) - message.name = String(object.name); - if (object.expireTime != null) { - if (typeof object.expireTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.expireTime: object expected"); - message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); - } - switch (object.dataFormat) { - default: - if (typeof object.dataFormat === "number") { - message.dataFormat = object.dataFormat; - break; - } - break; - case "DATA_FORMAT_UNSPECIFIED": - case 0: - message.dataFormat = 0; - break; - case "AVRO": - case 1: - message.dataFormat = 1; - break; - case "ARROW": - case 2: - message.dataFormat = 2; - break; - } - if (object.avroSchema != null) { - if (typeof object.avroSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.avroSchema: object expected"); - message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); - } - if (object.arrowSchema != null) { - if (typeof object.arrowSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.arrowSchema: object expected"); - message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); - } - if (object.table != null) - message.table = String(object.table); - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.tableModifiers: object expected"); - message.tableModifiers = 
$root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.fromObject(object.tableModifiers); - } - if (object.readOptions != null) { - if (typeof object.readOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.readOptions: object expected"); - message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.fromObject(object.readOptions); - } - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); - } - } - if (object.estimatedTotalBytesScanned != null) - if ($util.Long) - (message.estimatedTotalBytesScanned = $util.Long.fromValue(object.estimatedTotalBytesScanned)).unsigned = false; - else if (typeof object.estimatedTotalBytesScanned === "string") - message.estimatedTotalBytesScanned = parseInt(object.estimatedTotalBytesScanned, 10); - else if (typeof object.estimatedTotalBytesScanned === "number") - message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; - else if (typeof object.estimatedTotalBytesScanned === "object") - message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); - if (object.traceId != null) - message.traceId = String(object.traceId); - return message; - }; - - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession} message ReadSession - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadSession.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streams = []; - if (options.defaults) { - object.name = ""; - object.expireTime = null; - object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; - object.table = ""; - object.tableModifiers = null; - object.readOptions = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.estimatedTotalBytesScanned = options.longs === String ? "0" : 0; - object.traceId = ""; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) - object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); - if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) - object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] === undefined ? 
message.dataFormat : $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); - if (options.oneofs) - object.schema = "avroSchema"; - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); - if (options.oneofs) - object.schema = "arrowSchema"; - } - if (message.table != null && message.hasOwnProperty("table")) - object.table = message.table; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.toObject(message.tableModifiers, options); - if (message.readOptions != null && message.hasOwnProperty("readOptions")) - object.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.toObject(message.readOptions, options); - if (message.streams && message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); - } - if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) - if (typeof message.estimatedTotalBytesScanned === "number") - object.estimatedTotalBytesScanned = options.longs === String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; - else - object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? 
new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; - if (message.traceId != null && message.hasOwnProperty("traceId")) - object.traceId = message.traceId; - return object; - }; - - /** - * Converts this ReadSession to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @instance - * @returns {Object.} JSON object - */ - ReadSession.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadSession - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession"; - }; - - ReadSession.TableModifiers = (function() { - - /** - * Properties of a TableModifiers. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @interface ITableModifiers - * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime - */ - - /** - * Constructs a new TableModifiers. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @classdesc Represents a TableModifiers. - * @implements ITableModifiers - * @constructor - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set - */ - function TableModifiers(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableModifiers snapshotTime. 
- * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @instance - */ - TableModifiers.prototype.snapshotTime = null; - - /** - * Creates a new TableModifiers instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers instance - */ - TableModifiers.create = function create(properties) { - return new TableModifiers(properties); - }; - - /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) - $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableModifiers message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableModifiers message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableModifiers.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); - if (error) - return "snapshotTime." + error; - } - return null; - }; - - /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers - */ - TableModifiers.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); - if (object.snapshotTime != null) { - if (typeof object.snapshotTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.snapshotTime: object expected"); - message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); - } - return message; - }; - - /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} message TableModifiers - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableModifiers.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.snapshotTime = null; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) - object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); - return object; - }; - - /** - * Converts this TableModifiers to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @instance - * @returns {Object.} JSON object - */ - TableModifiers.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableModifiers - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"; - }; - - return TableModifiers; - })(); - - ReadSession.TableReadOptions = (function() { - - /** - * Properties of a TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @interface ITableReadOptions - * @property {Array.|null} [selectedFields] TableReadOptions selectedFields - * @property {string|null} [rowRestriction] TableReadOptions rowRestriction - * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions - * @property {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null} [avroSerializationOptions] TableReadOptions avroSerializationOptions - */ - - /** - * Constructs a new TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1.ReadSession - * @classdesc Represents a TableReadOptions. 
- * @implements ITableReadOptions - * @constructor - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set - */ - function TableReadOptions(properties) { - this.selectedFields = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableReadOptions selectedFields. - * @member {Array.} selectedFields - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.selectedFields = $util.emptyArray; - - /** - * TableReadOptions rowRestriction. - * @member {string} rowRestriction - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.rowRestriction = ""; - - /** - * TableReadOptions arrowSerializationOptions. - * @member {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null|undefined} arrowSerializationOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.arrowSerializationOptions = null; - - /** - * TableReadOptions avroSerializationOptions. - * @member {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null|undefined} avroSerializationOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - TableReadOptions.prototype.avroSerializationOptions = null; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * TableReadOptions outputFormatSerializationOptions. 
- * @member {"arrowSerializationOptions"|"avroSerializationOptions"|undefined} outputFormatSerializationOptions - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ - Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { - get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions", "avroSerializationOptions"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new TableReadOptions instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions instance - */ - TableReadOptions.create = function create(properties) { - return new TableReadOptions(properties); - }; - - /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReadOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.selectedFields != null && message.selectedFields.length) - for (var i = 0; i < message.selectedFields.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); - if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) - $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.avroSerializationOptions != null && Object.hasOwnProperty.call(message, "avroSerializationOptions")) - $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.encode(message.avroSerializationOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReadOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); - break; - } - case 2: { - message.rowRestriction = reader.string(); - break; - } - case 3: { - message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); - break; - } - case 4: { - message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReadOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableReadOptions message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableReadOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { - if (!Array.isArray(message.selectedFields)) - return "selectedFields: array expected"; - for (var i = 0; i < message.selectedFields.length; ++i) - if (!$util.isString(message.selectedFields[i])) - return "selectedFields: string[] expected"; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - if (!$util.isString(message.rowRestriction)) - return "rowRestriction: string expected"; - if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { - properties.outputFormatSerializationOptions = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify(message.arrowSerializationOptions); - if (error) - return "arrowSerializationOptions." + error; - } - } - if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { - if (properties.outputFormatSerializationOptions === 1) - return "outputFormatSerializationOptions: multiple values"; - properties.outputFormatSerializationOptions = 1; - { - var error = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify(message.avroSerializationOptions); - if (error) - return "avroSerializationOptions." + error; - } - } - return null; - }; - - /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions - */ - TableReadOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); - if (object.selectedFields) { - if (!Array.isArray(object.selectedFields)) - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.selectedFields: array expected"); - message.selectedFields = []; - for (var i = 0; i < object.selectedFields.length; ++i) - message.selectedFields[i] = String(object.selectedFields[i]); - } - if (object.rowRestriction != null) - message.rowRestriction = String(object.rowRestriction); - if (object.arrowSerializationOptions != null) { - if (typeof object.arrowSerializationOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); - message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); - } - if (object.avroSerializationOptions != null) { - if (typeof object.avroSerializationOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.avroSerializationOptions: object expected"); - message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.fromObject(object.avroSerializationOptions); - } - return message; - }; - - /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} message TableReadOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableReadOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.selectedFields = []; - if (options.defaults) - object.rowRestriction = ""; - if (message.selectedFields && message.selectedFields.length) { - object.selectedFields = []; - for (var j = 0; j < message.selectedFields.length; ++j) - object.selectedFields[j] = message.selectedFields[j]; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - object.rowRestriction = message.rowRestriction; - if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { - object.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options); - if (options.oneofs) - object.outputFormatSerializationOptions = "arrowSerializationOptions"; - } - if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { - object.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.toObject(message.avroSerializationOptions, options); - if (options.oneofs) - object.outputFormatSerializationOptions = "avroSerializationOptions"; - } - return object; - }; - - /** - * Converts this TableReadOptions to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - * @returns {Object.} JSON object - */ - TableReadOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableReadOptions - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"; - }; - - return TableReadOptions; - })(); - - return ReadSession; - })(); - - v1.ReadStream = (function() { - - /** - * Properties of a ReadStream. - * @memberof google.cloud.bigquery.storage.v1 - * @interface IReadStream - * @property {string|null} [name] ReadStream name - */ - - /** - * Constructs a new ReadStream. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a ReadStream. - * @implements IReadStream - * @constructor - * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set - */ - function ReadStream(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadStream name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @instance - */ - ReadStream.prototype.name = ""; - - /** - * Creates a new ReadStream instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream instance - */ - ReadStream.create = function create(properties) { - return new ReadStream(properties); - }; - - /** - * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadStream.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - return writer; - }; - - /** - * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadStream.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadStream message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadStream.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadStream message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadStream.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadStream message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadStream.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - return null; - }; - - /** - * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream - */ - ReadStream.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadStream) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); - if (object.name != null) - message.name = String(object.name); - return message; - }; - - /** - * Creates a plain object from a ReadStream message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {google.cloud.bigquery.storage.v1.ReadStream} message ReadStream - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadStream.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.name = ""; - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - return object; - }; - - /** - * Converts this ReadStream to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @instance - * @returns {Object.} JSON object - */ - ReadStream.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadStream - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.ReadStream - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadStream"; - }; - - return ReadStream; - })(); - - /** - * WriteStreamView enum. - * @name google.cloud.bigquery.storage.v1.WriteStreamView - * @enum {number} - * @property {number} WRITE_STREAM_VIEW_UNSPECIFIED=0 WRITE_STREAM_VIEW_UNSPECIFIED value - * @property {number} BASIC=1 BASIC value - * @property {number} FULL=2 FULL value - */ - v1.WriteStreamView = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "WRITE_STREAM_VIEW_UNSPECIFIED"] = 0; - values[valuesById[1] = "BASIC"] = 1; - values[valuesById[2] = "FULL"] = 2; - return values; - })(); - - v1.WriteStream = (function() { - - /** - * Properties of a WriteStream. 
- * @memberof google.cloud.bigquery.storage.v1 - * @interface IWriteStream - * @property {string|null} [name] WriteStream name - * @property {google.cloud.bigquery.storage.v1.WriteStream.Type|null} [type] WriteStream type - * @property {google.protobuf.ITimestamp|null} [createTime] WriteStream createTime - * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime - * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [tableSchema] WriteStream tableSchema - * @property {google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null} [writeMode] WriteStream writeMode - * @property {string|null} [location] WriteStream location - */ - - /** - * Constructs a new WriteStream. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a WriteStream. - * @implements IWriteStream - * @constructor - * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set - */ - function WriteStream(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * WriteStream name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.name = ""; - - /** - * WriteStream type. - * @member {google.cloud.bigquery.storage.v1.WriteStream.Type} type - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.type = 0; - - /** - * WriteStream createTime. - * @member {google.protobuf.ITimestamp|null|undefined} createTime - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.createTime = null; - - /** - * WriteStream commitTime. 
- * @member {google.protobuf.ITimestamp|null|undefined} commitTime - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.commitTime = null; - - /** - * WriteStream tableSchema. - * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} tableSchema - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.tableSchema = null; - - /** - * WriteStream writeMode. - * @member {google.cloud.bigquery.storage.v1.WriteStream.WriteMode} writeMode - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.writeMode = 0; - - /** - * WriteStream location. - * @member {string} location - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - */ - WriteStream.prototype.location = ""; - - /** - * Creates a new WriteStream instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream instance - */ - WriteStream.create = function create(properties) { - return new WriteStream(properties); - }; - - /** - * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - WriteStream.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); - if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) - $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) - $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.tableSchema != null && Object.hasOwnProperty.call(message, "tableSchema")) - $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.writeMode != null && Object.hasOwnProperty.call(message, "writeMode")) - writer.uint32(/* id 7, wireType 0 =*/56).int32(message.writeMode); - if (message.location != null && Object.hasOwnProperty.call(message, "location")) - writer.uint32(/* id 8, wireType 2 =*/66).string(message.location); - return writer; - }; - - /** - * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - WriteStream.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a WriteStream message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - WriteStream.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.type = reader.int32(); - break; - } - case 3: { - message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - case 4: { - message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - case 5: { - message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); - break; - } - case 7: { - message.writeMode = reader.int32(); - break; - } - case 8: { - message.location = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a WriteStream message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - WriteStream.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a WriteStream message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - WriteStream.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.type != null && message.hasOwnProperty("type")) - switch (message.type) { - default: - return "type: enum value expected"; - case 0: - case 1: - case 2: - case 3: - break; - } - if (message.createTime != null && message.hasOwnProperty("createTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.createTime); - if (error) - return "createTime." + error; - } - if (message.commitTime != null && message.hasOwnProperty("commitTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.commitTime); - if (error) - return "commitTime." + error; - } - if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) { - var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.tableSchema); - if (error) - return "tableSchema." + error; - } - if (message.writeMode != null && message.hasOwnProperty("writeMode")) - switch (message.writeMode) { - default: - return "writeMode: enum value expected"; - case 0: - case 1: - break; - } - if (message.location != null && message.hasOwnProperty("location")) - if (!$util.isString(message.location)) - return "location: string expected"; - return null; - }; - - /** - * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream - */ - WriteStream.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.WriteStream) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); - if (object.name != null) - message.name = String(object.name); - switch (object.type) { - default: - if (typeof object.type === "number") { - message.type = object.type; - break; - } - break; - case "TYPE_UNSPECIFIED": - case 0: - message.type = 0; - break; - case "COMMITTED": - case 1: - message.type = 1; - break; - case "PENDING": - case 2: - message.type = 2; - break; - case "BUFFERED": - case 3: - message.type = 3; - break; - } - if (object.createTime != null) { - if (typeof object.createTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.createTime: object expected"); - message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); - } - if (object.commitTime != null) { - if (typeof object.commitTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.commitTime: object expected"); - message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime); - } - if (object.tableSchema != null) { - if (typeof object.tableSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.tableSchema: object expected"); - message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.tableSchema); - } - switch (object.writeMode) { - default: - if (typeof object.writeMode === "number") { - message.writeMode = object.writeMode; - break; - } - break; - case "WRITE_MODE_UNSPECIFIED": - case 0: - message.writeMode = 0; - break; - case "INSERT": - case 1: - message.writeMode = 1; - break; - } - if 
(object.location != null) - message.location = String(object.location); - return message; - }; - - /** - * Creates a plain object from a WriteStream message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {google.cloud.bigquery.storage.v1.WriteStream} message WriteStream - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - WriteStream.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; - object.createTime = null; - object.commitTime = null; - object.tableSchema = null; - object.writeMode = options.enums === String ? "WRITE_MODE_UNSPECIFIED" : 0; - object.location = ""; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] : message.type; - if (message.createTime != null && message.hasOwnProperty("createTime")) - object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); - if (message.commitTime != null && message.hasOwnProperty("commitTime")) - object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); - if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) - object.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.tableSchema, options); - if (message.writeMode != null && message.hasOwnProperty("writeMode")) - object.writeMode = options.enums === String ? 
$root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] === undefined ? message.writeMode : $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] : message.writeMode; - if (message.location != null && message.hasOwnProperty("location")) - object.location = message.location; - return object; - }; - - /** - * Converts this WriteStream to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @instance - * @returns {Object.} JSON object - */ - WriteStream.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for WriteStream - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.WriteStream - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - WriteStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.WriteStream"; - }; - - /** - * Type enum. - * @name google.cloud.bigquery.storage.v1.WriteStream.Type - * @enum {number} - * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value - * @property {number} COMMITTED=1 COMMITTED value - * @property {number} PENDING=2 PENDING value - * @property {number} BUFFERED=3 BUFFERED value - */ - WriteStream.Type = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; - values[valuesById[1] = "COMMITTED"] = 1; - values[valuesById[2] = "PENDING"] = 2; - values[valuesById[3] = "BUFFERED"] = 3; - return values; - })(); - - /** - * WriteMode enum. 
- * @name google.cloud.bigquery.storage.v1.WriteStream.WriteMode - * @enum {number} - * @property {number} WRITE_MODE_UNSPECIFIED=0 WRITE_MODE_UNSPECIFIED value - * @property {number} INSERT=1 INSERT value - */ - WriteStream.WriteMode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "WRITE_MODE_UNSPECIFIED"] = 0; - values[valuesById[1] = "INSERT"] = 1; - return values; - })(); - - return WriteStream; - })(); - - v1.TableSchema = (function() { - - /** - * Properties of a TableSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ITableSchema - * @property {Array.|null} [fields] TableSchema fields - */ - - /** - * Constructs a new TableSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a TableSchema. - * @implements ITableSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set - */ - function TableSchema(properties) { - this.fields = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableSchema fields. - * @member {Array.} fields - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @instance - */ - TableSchema.prototype.fields = $util.emptyArray; - - /** - * Creates a new TableSchema instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema instance - */ - TableSchema.create = function create(properties) { - return new TableSchema(properties); - }; - - /** - * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.fields != null && message.fields.length) - for (var i = 0; i < message.fields.length; ++i) - $root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableSchema message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.fields && message.fields.length)) - message.fields = []; - message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableSchema message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.fields != null && message.hasOwnProperty("fields")) { - if (!Array.isArray(message.fields)) - return "fields: array expected"; - for (var i = 0; i < message.fields.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); - if (error) - return "fields." + error; - } - } - return null; - }; - - /** - * Creates a TableSchema message from a plain object. 
Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema - */ - TableSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.TableSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); - if (object.fields) { - if (!Array.isArray(object.fields)) - throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: array expected"); - message.fields = []; - for (var i = 0; i < object.fields.length; ++i) { - if (typeof object.fields[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: object expected"); - message.fields[i] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a TableSchema message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {google.cloud.bigquery.storage.v1.TableSchema} message TableSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.fields = []; - if (message.fields && message.fields.length) { - object.fields = []; - for (var j = 0; j < message.fields.length; ++j) - object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); - } - return object; - }; - - /** - * Converts this TableSchema to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @instance - * @returns {Object.} JSON object - */ - TableSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.TableSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableSchema"; - }; - - return TableSchema; - })(); - - v1.TableFieldSchema = (function() { - - /** - * Properties of a TableFieldSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @interface ITableFieldSchema - * @property {string|null} [name] TableFieldSchema name - * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null} [type] TableFieldSchema type - * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null} [mode] TableFieldSchema mode - * @property {Array.|null} [fields] TableFieldSchema fields - * @property {string|null} [description] TableFieldSchema description - * @property {number|Long|null} [maxLength] TableFieldSchema maxLength - * @property {number|Long|null} [precision] TableFieldSchema precision - * @property {number|Long|null} [scale] TableFieldSchema scale - */ - - /** - * Constructs a new TableFieldSchema. - * @memberof google.cloud.bigquery.storage.v1 - * @classdesc Represents a TableFieldSchema. 
- * @implements ITableFieldSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set - */ - function TableFieldSchema(properties) { - this.fields = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableFieldSchema name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.name = ""; - - /** - * TableFieldSchema type. - * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Type} type - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.type = 0; - - /** - * TableFieldSchema mode. - * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode} mode - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.mode = 0; - - /** - * TableFieldSchema fields. - * @member {Array.} fields - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.fields = $util.emptyArray; - - /** - * TableFieldSchema description. - * @member {string} description - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.description = ""; - - /** - * TableFieldSchema maxLength. - * @member {number|Long} maxLength - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.maxLength = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * TableFieldSchema precision. - * @member {number|Long} precision - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.precision = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * TableFieldSchema scale. 
- * @member {number|Long} scale - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - */ - TableFieldSchema.prototype.scale = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new TableFieldSchema instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema instance - */ - TableFieldSchema.create = function create(properties) { - return new TableFieldSchema(properties); - }; - - /** - * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableFieldSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); - if (message.mode != null && Object.hasOwnProperty.call(message, "mode")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.mode); - if (message.fields != null && message.fields.length) - for (var i = 0; i < message.fields.length; ++i) - $root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.description != null && 
Object.hasOwnProperty.call(message, "description")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.description); - if (message.maxLength != null && Object.hasOwnProperty.call(message, "maxLength")) - writer.uint32(/* id 7, wireType 0 =*/56).int64(message.maxLength); - if (message.precision != null && Object.hasOwnProperty.call(message, "precision")) - writer.uint32(/* id 8, wireType 0 =*/64).int64(message.precision); - if (message.scale != null && Object.hasOwnProperty.call(message, "scale")) - writer.uint32(/* id 9, wireType 0 =*/72).int64(message.scale); - return writer; - }; - - /** - * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableFieldSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableFieldSchema message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableFieldSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.type = reader.int32(); - break; - } - case 3: { - message.mode = reader.int32(); - break; - } - case 4: { - if (!(message.fields && message.fields.length)) - message.fields = []; - message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); - break; - } - case 6: { - message.description = reader.string(); - break; - } - case 7: { - message.maxLength = reader.int64(); - break; - } - case 8: { - message.precision = reader.int64(); - break; - } - case 9: { - message.scale = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableFieldSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableFieldSchema message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableFieldSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.type != null && message.hasOwnProperty("type")) - switch (message.type) { - default: - return "type: enum value expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - case 10: - case 11: - case 12: - case 13: - case 14: - case 15: - break; - } - if (message.mode != null && message.hasOwnProperty("mode")) - switch (message.mode) { - default: - return "mode: enum value expected"; - case 0: - case 1: - case 2: - case 3: - break; - } - if (message.fields != null && message.hasOwnProperty("fields")) { - if (!Array.isArray(message.fields)) - return "fields: array expected"; - for (var i = 0; i < message.fields.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); - if (error) - return "fields." 
+ error; - } - } - if (message.description != null && message.hasOwnProperty("description")) - if (!$util.isString(message.description)) - return "description: string expected"; - if (message.maxLength != null && message.hasOwnProperty("maxLength")) - if (!$util.isInteger(message.maxLength) && !(message.maxLength && $util.isInteger(message.maxLength.low) && $util.isInteger(message.maxLength.high))) - return "maxLength: integer|Long expected"; - if (message.precision != null && message.hasOwnProperty("precision")) - if (!$util.isInteger(message.precision) && !(message.precision && $util.isInteger(message.precision.low) && $util.isInteger(message.precision.high))) - return "precision: integer|Long expected"; - if (message.scale != null && message.hasOwnProperty("scale")) - if (!$util.isInteger(message.scale) && !(message.scale && $util.isInteger(message.scale.low) && $util.isInteger(message.scale.high))) - return "scale: integer|Long expected"; - return null; - }; - - /** - * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema - */ - TableFieldSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1.TableFieldSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); - if (object.name != null) - message.name = String(object.name); - switch (object.type) { - default: - if (typeof object.type === "number") { - message.type = object.type; - break; - } - break; - case "TYPE_UNSPECIFIED": - case 0: - message.type = 0; - break; - case "STRING": - case 1: - message.type = 1; - break; - case "INT64": - case 2: - message.type = 2; - break; - case "DOUBLE": - case 3: - message.type = 3; - break; - case "STRUCT": - case 4: - message.type = 4; - break; - case "BYTES": - case 5: - message.type = 5; - break; - case "BOOL": - case 6: - message.type = 6; - break; - case "TIMESTAMP": - case 7: - message.type = 7; - break; - case "DATE": - case 8: - message.type = 8; - break; - case "TIME": - case 9: - message.type = 9; - break; - case "DATETIME": - case 10: - message.type = 10; - break; - case "GEOGRAPHY": - case 11: - message.type = 11; - break; - case "NUMERIC": - case 12: - message.type = 12; - break; - case "BIGNUMERIC": - case 13: - message.type = 13; - break; - case "INTERVAL": - case 14: - message.type = 14; - break; - case "JSON": - case 15: - message.type = 15; - break; - } - switch (object.mode) { - default: - if (typeof object.mode === "number") { - message.mode = object.mode; - break; - } - break; - case "MODE_UNSPECIFIED": - case 0: - message.mode = 0; - break; - case "NULLABLE": - case 1: - message.mode = 1; - break; - case "REQUIRED": - case 2: - message.mode = 2; - break; - case "REPEATED": - case 3: - message.mode = 3; - break; - } - if (object.fields) { - if 
(!Array.isArray(object.fields)) - throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: array expected"); - message.fields = []; - for (var i = 0; i < object.fields.length; ++i) { - if (typeof object.fields[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: object expected"); - message.fields[i] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); - } - } - if (object.description != null) - message.description = String(object.description); - if (object.maxLength != null) - if ($util.Long) - (message.maxLength = $util.Long.fromValue(object.maxLength)).unsigned = false; - else if (typeof object.maxLength === "string") - message.maxLength = parseInt(object.maxLength, 10); - else if (typeof object.maxLength === "number") - message.maxLength = object.maxLength; - else if (typeof object.maxLength === "object") - message.maxLength = new $util.LongBits(object.maxLength.low >>> 0, object.maxLength.high >>> 0).toNumber(); - if (object.precision != null) - if ($util.Long) - (message.precision = $util.Long.fromValue(object.precision)).unsigned = false; - else if (typeof object.precision === "string") - message.precision = parseInt(object.precision, 10); - else if (typeof object.precision === "number") - message.precision = object.precision; - else if (typeof object.precision === "object") - message.precision = new $util.LongBits(object.precision.low >>> 0, object.precision.high >>> 0).toNumber(); - if (object.scale != null) - if ($util.Long) - (message.scale = $util.Long.fromValue(object.scale)).unsigned = false; - else if (typeof object.scale === "string") - message.scale = parseInt(object.scale, 10); - else if (typeof object.scale === "number") - message.scale = object.scale; - else if (typeof object.scale === "object") - message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object 
from a TableFieldSchema message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {google.cloud.bigquery.storage.v1.TableFieldSchema} message TableFieldSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableFieldSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.fields = []; - if (options.defaults) { - object.name = ""; - object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; - object.mode = options.enums === String ? "MODE_UNSPECIFIED" : 0; - object.description = ""; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.maxLength = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.maxLength = options.longs === String ? "0" : 0; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.precision = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.precision = options.longs === String ? "0" : 0; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.scale = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.scale = options.longs === String ? "0" : 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] === undefined ? 
message.type : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; - if (message.mode != null && message.hasOwnProperty("mode")) - object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] === undefined ? message.mode : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] : message.mode; - if (message.fields && message.fields.length) { - object.fields = []; - for (var j = 0; j < message.fields.length; ++j) - object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); - } - if (message.description != null && message.hasOwnProperty("description")) - object.description = message.description; - if (message.maxLength != null && message.hasOwnProperty("maxLength")) - if (typeof message.maxLength === "number") - object.maxLength = options.longs === String ? String(message.maxLength) : message.maxLength; - else - object.maxLength = options.longs === String ? $util.Long.prototype.toString.call(message.maxLength) : options.longs === Number ? new $util.LongBits(message.maxLength.low >>> 0, message.maxLength.high >>> 0).toNumber() : message.maxLength; - if (message.precision != null && message.hasOwnProperty("precision")) - if (typeof message.precision === "number") - object.precision = options.longs === String ? String(message.precision) : message.precision; - else - object.precision = options.longs === String ? $util.Long.prototype.toString.call(message.precision) : options.longs === Number ? new $util.LongBits(message.precision.low >>> 0, message.precision.high >>> 0).toNumber() : message.precision; - if (message.scale != null && message.hasOwnProperty("scale")) - if (typeof message.scale === "number") - object.scale = options.longs === String ? String(message.scale) : message.scale; - else - object.scale = options.longs === String ? 
$util.Long.prototype.toString.call(message.scale) : options.longs === Number ? new $util.LongBits(message.scale.low >>> 0, message.scale.high >>> 0).toNumber() : message.scale; - return object; - }; - - /** - * Converts this TableFieldSchema to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @instance - * @returns {Object.} JSON object - */ - TableFieldSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableFieldSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableFieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableFieldSchema"; - }; - - /** - * Type enum. 
- * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Type - * @enum {number} - * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value - * @property {number} STRING=1 STRING value - * @property {number} INT64=2 INT64 value - * @property {number} DOUBLE=3 DOUBLE value - * @property {number} STRUCT=4 STRUCT value - * @property {number} BYTES=5 BYTES value - * @property {number} BOOL=6 BOOL value - * @property {number} TIMESTAMP=7 TIMESTAMP value - * @property {number} DATE=8 DATE value - * @property {number} TIME=9 TIME value - * @property {number} DATETIME=10 DATETIME value - * @property {number} GEOGRAPHY=11 GEOGRAPHY value - * @property {number} NUMERIC=12 NUMERIC value - * @property {number} BIGNUMERIC=13 BIGNUMERIC value - * @property {number} INTERVAL=14 INTERVAL value - * @property {number} JSON=15 JSON value - */ - TableFieldSchema.Type = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; - values[valuesById[1] = "STRING"] = 1; - values[valuesById[2] = "INT64"] = 2; - values[valuesById[3] = "DOUBLE"] = 3; - values[valuesById[4] = "STRUCT"] = 4; - values[valuesById[5] = "BYTES"] = 5; - values[valuesById[6] = "BOOL"] = 6; - values[valuesById[7] = "TIMESTAMP"] = 7; - values[valuesById[8] = "DATE"] = 8; - values[valuesById[9] = "TIME"] = 9; - values[valuesById[10] = "DATETIME"] = 10; - values[valuesById[11] = "GEOGRAPHY"] = 11; - values[valuesById[12] = "NUMERIC"] = 12; - values[valuesById[13] = "BIGNUMERIC"] = 13; - values[valuesById[14] = "INTERVAL"] = 14; - values[valuesById[15] = "JSON"] = 15; - return values; - })(); - - /** - * Mode enum. 
- * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Mode - * @enum {number} - * @property {number} MODE_UNSPECIFIED=0 MODE_UNSPECIFIED value - * @property {number} NULLABLE=1 NULLABLE value - * @property {number} REQUIRED=2 REQUIRED value - * @property {number} REPEATED=3 REPEATED value - */ - TableFieldSchema.Mode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "MODE_UNSPECIFIED"] = 0; - values[valuesById[1] = "NULLABLE"] = 1; - values[valuesById[2] = "REQUIRED"] = 2; - values[valuesById[3] = "REPEATED"] = 3; - return values; - })(); - - return TableFieldSchema; - })(); - - return v1; - })(); - - storage.v1beta1 = (function() { - - /** - * Namespace v1beta1. - * @memberof google.cloud.bigquery.storage - * @namespace - */ - var v1beta1 = {}; - - v1beta1.ArrowSchema = (function() { - - /** - * Properties of an ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IArrowSchema - * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema - */ - - /** - * Constructs a new ArrowSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an ArrowSchema. - * @implements IArrowSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set - */ - function ArrowSchema(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ArrowSchema serializedSchema. - * @member {Uint8Array} serializedSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @instance - */ - ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); - - /** - * Creates a new ArrowSchema instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema instance - */ - ArrowSchema.create = function create(properties) { - return new ArrowSchema(properties); - }; - - /** - * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); - return writer; - }; - - /** - * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedSchema = reader.bytes(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ArrowSchema message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ArrowSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) - return "serializedSchema: buffer expected"; - return null; - }; - - /** - * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema - */ - ArrowSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); - if (object.serializedSchema != null) - if (typeof object.serializedSchema === "string") - $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); - else if (object.serializedSchema.length >= 0) - message.serializedSchema = object.serializedSchema; - return message; - }; - - /** - * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ArrowSchema} message ArrowSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ArrowSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if (options.bytes === String) - object.serializedSchema = ""; - else { - object.serializedSchema = []; - if (options.bytes !== Array) - object.serializedSchema = $util.newBuffer(object.serializedSchema); - } - if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) - object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; - return object; - }; - - /** - * Converts this ArrowSchema to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @instance - * @returns {Object.} JSON object - */ - ArrowSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ArrowSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowSchema"; - }; - - return ArrowSchema; - })(); - - v1beta1.ArrowRecordBatch = (function() { - - /** - * Properties of an ArrowRecordBatch. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IArrowRecordBatch - * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch - * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount - */ - - /** - * Constructs a new ArrowRecordBatch. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an ArrowRecordBatch. - * @implements IArrowRecordBatch - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set - */ - function ArrowRecordBatch(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ArrowRecordBatch serializedRecordBatch. - * @member {Uint8Array} serializedRecordBatch - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @instance - */ - ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); - - /** - * ArrowRecordBatch rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @instance - */ - ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new ArrowRecordBatch instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch instance - */ - ArrowRecordBatch.create = function create(properties) { - return new ArrowRecordBatch(properties); - }; - - /** - * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowRecordBatch.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowRecordBatch.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedRecordBatch = reader.bytes(); - break; - } - case 2: { - message.rowCount = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ArrowRecordBatch message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ArrowRecordBatch.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) - return "serializedRecordBatch: buffer expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - return null; - }; - - /** - * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch - */ - ArrowRecordBatch.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); - if (object.serializedRecordBatch != null) - if (typeof object.serializedRecordBatch === "string") - $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); - else if (object.serializedRecordBatch.length >= 0) - message.serializedRecordBatch = object.serializedRecordBatch; - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} message ArrowRecordBatch - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ArrowRecordBatch.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedRecordBatch = ""; - else { - object.serializedRecordBatch = []; - if (options.bytes !== Array) - object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); - } - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; - } - if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) - object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this ArrowRecordBatch to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @instance - * @returns {Object.} JSON object - */ - ArrowRecordBatch.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ArrowRecordBatch - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch"; - }; - - return ArrowRecordBatch; - })(); - - v1beta1.AvroSchema = (function() { - - /** - * Properties of an AvroSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IAvroSchema - * @property {string|null} [schema] AvroSchema schema - */ - - /** - * Constructs a new AvroSchema. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an AvroSchema. - * @implements IAvroSchema - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set - */ - function AvroSchema(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AvroSchema schema. - * @member {string} schema - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @instance - */ - AvroSchema.prototype.schema = ""; - - /** - * Creates a new AvroSchema instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema instance - */ - AvroSchema.create = function create(properties) { - return new AvroSchema(properties); - }; - - /** - * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSchema.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); - return writer; - }; - - /** - * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AvroSchema message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSchema.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.schema = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AvroSchema message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroSchema.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AvroSchema message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AvroSchema.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.schema != null && message.hasOwnProperty("schema")) - if (!$util.isString(message.schema)) - return "schema: string expected"; - return null; - }; - - /** - * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema - */ - AvroSchema.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroSchema) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); - if (object.schema != null) - message.schema = String(object.schema); - return message; - }; - - /** - * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {google.cloud.bigquery.storage.v1beta1.AvroSchema} message AvroSchema - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AvroSchema.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.schema = ""; - if (message.schema != null && message.hasOwnProperty("schema")) - object.schema = message.schema; - return object; - }; - - /** - * Converts this AvroSchema to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @instance - * @returns {Object.} JSON object - */ - AvroSchema.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AvroSchema - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroSchema"; - }; - - return AvroSchema; - })(); - - v1beta1.AvroRows = (function() { - - /** - * Properties of an AvroRows. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IAvroRows - * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows - * @property {number|Long|null} [rowCount] AvroRows rowCount - */ - - /** - * Constructs a new AvroRows. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents an AvroRows. - * @implements IAvroRows - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set - */ - function AvroRows(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * AvroRows serializedBinaryRows. - * @member {Uint8Array} serializedBinaryRows - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @instance - */ - AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); - - /** - * AvroRows rowCount. 
- * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @instance - */ - AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new AvroRows instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows instance - */ - AvroRows.create = function create(properties) { - return new AvroRows(properties); - }; - - /** - * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroRows.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - AvroRows.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an AvroRows message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroRows.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.serializedBinaryRows = reader.bytes(); - break; - } - case 2: { - message.rowCount = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - AvroRows.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an AvroRows message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - AvroRows.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) - return "serializedBinaryRows: buffer expected"; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - return null; - }; - - /** - * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows - */ - AvroRows.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroRows) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); - if (object.serializedBinaryRows != null) - if (typeof object.serializedBinaryRows === "string") - $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); - else if (object.serializedBinaryRows.length >= 0) - message.serializedBinaryRows = object.serializedBinaryRows; - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {google.cloud.bigquery.storage.v1beta1.AvroRows} message AvroRows - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - AvroRows.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if (options.bytes === String) - object.serializedBinaryRows = ""; - else { - object.serializedBinaryRows = []; - if (options.bytes !== Array) - object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); - } - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? "0" : 0; - } - if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) - object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this AvroRows to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @instance - * @returns {Object.} JSON object - */ - AvroRows.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for AvroRows - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroRows"; - }; - - return AvroRows; - })(); - - v1beta1.TableReadOptions = (function() { - - /** - * Properties of a TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableReadOptions - * @property {Array.|null} [selectedFields] TableReadOptions selectedFields - * @property {string|null} [rowRestriction] TableReadOptions rowRestriction - */ - - /** - * Constructs a new TableReadOptions. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableReadOptions. - * @implements ITableReadOptions - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set - */ - function TableReadOptions(properties) { - this.selectedFields = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableReadOptions selectedFields. - * @member {Array.} selectedFields - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @instance - */ - TableReadOptions.prototype.selectedFields = $util.emptyArray; - - /** - * TableReadOptions rowRestriction. 
- * @member {string} rowRestriction - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @instance - */ - TableReadOptions.prototype.rowRestriction = ""; - - /** - * Creates a new TableReadOptions instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions instance - */ - TableReadOptions.create = function create(properties) { - return new TableReadOptions(properties); - }; - - /** - * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReadOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.selectedFields != null && message.selectedFields.length) - for (var i = 0; i < message.selectedFields.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); - if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); - return writer; - }; - - /** - * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReadOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.selectedFields && message.selectedFields.length)) - message.selectedFields = []; - message.selectedFields.push(reader.string()); - break; - } - case 2: { - message.rowRestriction = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReadOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableReadOptions message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableReadOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { - if (!Array.isArray(message.selectedFields)) - return "selectedFields: array expected"; - for (var i = 0; i < message.selectedFields.length; ++i) - if (!$util.isString(message.selectedFields[i])) - return "selectedFields: string[] expected"; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - if (!$util.isString(message.rowRestriction)) - return "rowRestriction: string expected"; - return null; - }; - - /** - * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions - */ - TableReadOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); - if (object.selectedFields) { - if (!Array.isArray(object.selectedFields)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableReadOptions.selectedFields: array expected"); - message.selectedFields = []; - for (var i = 0; i < object.selectedFields.length; ++i) - message.selectedFields[i] = String(object.selectedFields[i]); - } - if (object.rowRestriction != null) - message.rowRestriction = String(object.rowRestriction); - return message; - }; - - /** - * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} message TableReadOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableReadOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.selectedFields = []; - if (options.defaults) - object.rowRestriction = ""; - if (message.selectedFields && message.selectedFields.length) { - object.selectedFields = []; - for (var j = 0; j < message.selectedFields.length; ++j) - object.selectedFields[j] = message.selectedFields[j]; - } - if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) - object.rowRestriction = message.rowRestriction; - return object; - }; - - /** - * Converts this TableReadOptions to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @instance - * @returns {Object.} JSON object - */ - TableReadOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableReadOptions - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableReadOptions"; - }; - - return TableReadOptions; - })(); - - v1beta1.BigQueryStorage = (function() { - - /** - * Constructs a new BigQueryStorage service. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BigQueryStorage - * @extends $protobuf.rpc.Service - * @constructor - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - */ - function BigQueryStorage(rpcImpl, requestDelimited, responseDelimited) { - $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); - } - - (BigQueryStorage.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryStorage; - - /** - * Creates new BigQueryStorage service using the specified rpc implementation. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @static - * @param {$protobuf.RPCImpl} rpcImpl RPC implementation - * @param {boolean} [requestDelimited=false] Whether requests are length-delimited - * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - * @returns {BigQueryStorage} RPC service. Useful where requests and/or responses are streamed. - */ - BigQueryStorage.create = function create(rpcImpl, requestDelimited, responseDelimited) { - return new this(rpcImpl, requestDelimited, responseDelimited); - }; - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef CreateReadSessionCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} [response] ReadSession - */ - - /** - * Calls CreateReadSession. 
- * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.createReadSession = function createReadSession(request, callback) { - return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadSession, request, callback); - }, "name", { value: "CreateReadSession" }); - - /** - * Calls CreateReadSession. - * @function createReadSession - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef ReadRowsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} [response] ReadRowsResponse - */ - - /** - * Calls ReadRows. 
- * @function readRows - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.readRows = function readRows(request, callback) { - return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, request, callback); - }, "name", { value: "ReadRows" }); - - /** - * Calls ReadRows. - * @function readRows - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef BatchCreateReadSessionStreamsCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} [response] BatchCreateReadSessionStreamsResponse - */ - - /** - * Calls BatchCreateReadSessionStreams. 
- * @function batchCreateReadSessionStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback} callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.batchCreateReadSessionStreams = function batchCreateReadSessionStreams(request, callback) { - return this.rpcCall(batchCreateReadSessionStreams, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, request, callback); - }, "name", { value: "BatchCreateReadSessionStreams" }); - - /** - * Calls BatchCreateReadSessionStreams. - * @function batchCreateReadSessionStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef FinalizeStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.protobuf.Empty} [response] Empty - */ - - /** - * Calls FinalizeStream. 
- * @function finalizeStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback} callback Node-style callback called with the error, if any, and Empty - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.finalizeStream = function finalizeStream(request, callback) { - return this.rpcCall(finalizeStream, $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, $root.google.protobuf.Empty, request, callback); - }, "name", { value: "FinalizeStream" }); - - /** - * Calls FinalizeStream. - * @function finalizeStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - /** - * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @typedef SplitReadStreamCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} [response] SplitReadStreamResponse - */ - - /** - * Calls SplitReadStream. 
- * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse - * @returns {undefined} - * @variation 1 - */ - Object.defineProperty(BigQueryStorage.prototype.splitReadStream = function splitReadStream(request, callback) { - return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, request, callback); - }, "name", { value: "SplitReadStream" }); - - /** - * Calls SplitReadStream. - * @function splitReadStream - * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage - * @instance - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object - * @returns {Promise} Promise - * @variation 2 - */ - - return BigQueryStorage; - })(); - - v1beta1.Stream = (function() { - - /** - * Properties of a Stream. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStream - * @property {string|null} [name] Stream name - */ - - /** - * Constructs a new Stream. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a Stream. - * @implements IStream - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set - */ - function Stream(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Stream name. 
- * @member {string} name - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @instance - */ - Stream.prototype.name = ""; - - /** - * Creates a new Stream instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream instance - */ - Stream.create = function create(properties) { - return new Stream(properties); - }; - - /** - * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Stream.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - return writer; - }; - - /** - * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Stream.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Stream message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Stream.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Stream message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Stream.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Stream message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Stream.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - return null; - }; - - /** - * Creates a Stream message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream - */ - Stream.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Stream) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); - if (object.name != null) - message.name = String(object.name); - return message; - }; - - /** - * Creates a plain object from a Stream message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {google.cloud.bigquery.storage.v1beta1.Stream} message Stream - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Stream.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.name = ""; - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - return object; - }; - - /** - * Converts this Stream to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @instance - * @returns {Object.} JSON object - */ - Stream.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Stream - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.Stream - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Stream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Stream"; - }; - - return Stream; - })(); - - v1beta1.StreamPosition = (function() { - - /** - * Properties of a StreamPosition. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStreamPosition - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] StreamPosition stream - * @property {number|Long|null} [offset] StreamPosition offset - */ - - /** - * Constructs a new StreamPosition. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a StreamPosition. - * @implements IStreamPosition - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set - */ - function StreamPosition(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * StreamPosition stream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @instance - */ - StreamPosition.prototype.stream = null; - - /** - * StreamPosition offset. 
- * @member {number|Long} offset - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @instance - */ - StreamPosition.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new StreamPosition instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition instance - */ - StreamPosition.create = function create(properties) { - return new StreamPosition(properties); - }; - - /** - * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamPosition.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) - writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); - return writer; - }; - - /** - * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamPosition.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a StreamPosition message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamPosition.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - } - case 2: { - message.offset = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a StreamPosition message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamPosition.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a StreamPosition message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - StreamPosition.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.stream != null && message.hasOwnProperty("stream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); - if (error) - return "stream." + error; - } - if (message.offset != null && message.hasOwnProperty("offset")) - if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) - return "offset: integer|Long expected"; - return null; - }; - - /** - * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition - */ - StreamPosition.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamPosition) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); - if (object.stream != null) { - if (typeof object.stream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamPosition.stream: object expected"); - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); - } - if (object.offset != null) - if ($util.Long) - (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; - else if (typeof object.offset === "string") - message.offset = parseInt(object.offset, 10); - else if (typeof object.offset === "number") - message.offset = object.offset; - else if (typeof object.offset === "object") - message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} message StreamPosition - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - StreamPosition.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.stream = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.offset = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; - } else - object.offset = options.longs === String ? "0" : 0; - } - if (message.stream != null && message.hasOwnProperty("stream")) - object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); - if (message.offset != null && message.hasOwnProperty("offset")) - if (typeof message.offset === "number") - object.offset = options.longs === String ? String(message.offset) : message.offset; - else - object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; - return object; - }; - - /** - * Converts this StreamPosition to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @instance - * @returns {Object.} JSON object - */ - StreamPosition.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for StreamPosition - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - StreamPosition.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamPosition"; - }; - - return StreamPosition; - })(); - - v1beta1.ReadSession = (function() { - - /** - * Properties of a ReadSession. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadSession - * @property {string|null} [name] ReadSession name - * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime - * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadSession avroSchema - * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema - * @property {Array.|null} [streams] ReadSession streams - * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] ReadSession tableReference - * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers - * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] ReadSession shardingStrategy - */ - - /** - * Constructs a new ReadSession. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadSession. - * @implements IReadSession - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set - */ - function ReadSession(properties) { - this.streams = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadSession name. - * @member {string} name - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.name = ""; - - /** - * ReadSession expireTime. - * @member {google.protobuf.ITimestamp|null|undefined} expireTime - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.expireTime = null; - - /** - * ReadSession avroSchema. 
- * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.avroSchema = null; - - /** - * ReadSession arrowSchema. - * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.arrowSchema = null; - - /** - * ReadSession streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.streams = $util.emptyArray; - - /** - * ReadSession tableReference. - * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.tableReference = null; - - /** - * ReadSession tableModifiers. - * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.tableModifiers = null; - - /** - * ReadSession shardingStrategy. - * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - ReadSession.prototype.shardingStrategy = 0; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * ReadSession schema. - * @member {"avroSchema"|"arrowSchema"|undefined} schema - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - */ - Object.defineProperty(ReadSession.prototype, "schema", { - get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new ReadSession instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession instance - */ - ReadSession.create = function create(properties) { - return new ReadSession(properties); - }; - - /** - * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) - $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) - $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) - $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 
=*/50).fork()).ldelim(); - if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) - $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); - return writer; - }; - - /** - * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadSession.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadSession message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - case 5: { - message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); - break; - } - case 6: { - message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); - break; - } - case 4: { - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); - break; - } - case 7: { - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - } - case 8: { - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); - break; - } - case 9: { - message.shardingStrategy = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadSession message from the specified reader or buffer, length 
delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadSession.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadSession message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadSession.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.expireTime); - if (error) - return "expireTime." + error; - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); - if (error) - return "avroSchema." + error; - } - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - if (properties.schema === 1) - return "schema: multiple values"; - properties.schema = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); - if (error) - return "arrowSchema." 
+ error; - } - } - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); - if (error) - return "streams." + error; - } - } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); - if (error) - return "tableReference." + error; - } - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); - if (error) - return "tableModifiers." + error; - } - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - switch (message.shardingStrategy) { - default: - return "shardingStrategy: enum value expected"; - case 0: - case 1: - case 2: - break; - } - return null; - }; - - /** - * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession - */ - ReadSession.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadSession) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); - if (object.name != null) - message.name = String(object.name); - if (object.expireTime != null) { - if (typeof object.expireTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.expireTime: object expected"); - message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); - } - if (object.avroSchema != null) { - if (typeof object.avroSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.avroSchema: object expected"); - message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); - } - if (object.arrowSchema != null) { - if (typeof object.arrowSchema !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.arrowSchema: object expected"); - message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); - } - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); - } - } - if (object.tableReference != null) { - if (typeof object.tableReference !== "object") - throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableReference: object expected"); - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); - } - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableModifiers: object expected"); - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); - } - switch (object.shardingStrategy) { - default: - if (typeof object.shardingStrategy === "number") { - message.shardingStrategy = object.shardingStrategy; - break; - } - break; - case "SHARDING_STRATEGY_UNSPECIFIED": - case 0: - message.shardingStrategy = 0; - break; - case "LIQUID": - case 1: - message.shardingStrategy = 1; - break; - case "BALANCED": - case 2: - message.shardingStrategy = 2; - break; - } - return message; - }; - - /** - * Creates a plain object from a ReadSession message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} message ReadSession - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadSession.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streams = []; - if (options.defaults) { - object.name = ""; - object.expireTime = null; - object.tableReference = null; - object.tableModifiers = null; - object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.expireTime != null && message.hasOwnProperty("expireTime")) - object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); - if (message.streams && message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); - } - if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { - object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); - if (options.oneofs) - object.schema = "avroSchema"; - } - if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { - object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); - if (options.oneofs) - object.schema = "arrowSchema"; - } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) - object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; - return object; - }; - - /** - * Converts this ReadSession to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @instance - * @returns {Object.} JSON object - */ - ReadSession.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadSession - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadSession"; - }; - - return ReadSession; - })(); - - v1beta1.CreateReadSessionRequest = (function() { - - /** - * Properties of a CreateReadSessionRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ICreateReadSessionRequest - * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] CreateReadSessionRequest tableReference - * @property {string|null} [parent] CreateReadSessionRequest parent - * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] CreateReadSessionRequest tableModifiers - * @property {number|null} [requestedStreams] CreateReadSessionRequest requestedStreams - * @property {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null} [readOptions] CreateReadSessionRequest readOptions - * @property {google.cloud.bigquery.storage.v1beta1.DataFormat|null} [format] CreateReadSessionRequest format - * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] CreateReadSessionRequest shardingStrategy - */ - - /** - * Constructs a new CreateReadSessionRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a CreateReadSessionRequest. 
- * @implements ICreateReadSessionRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set - */ - function CreateReadSessionRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * CreateReadSessionRequest tableReference. - * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.tableReference = null; - - /** - * CreateReadSessionRequest parent. - * @member {string} parent - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.parent = ""; - - /** - * CreateReadSessionRequest tableModifiers. - * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.tableModifiers = null; - - /** - * CreateReadSessionRequest requestedStreams. - * @member {number} requestedStreams - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.requestedStreams = 0; - - /** - * CreateReadSessionRequest readOptions. - * @member {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null|undefined} readOptions - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.readOptions = null; - - /** - * CreateReadSessionRequest format. 
- * @member {google.cloud.bigquery.storage.v1beta1.DataFormat} format - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.format = 0; - - /** - * CreateReadSessionRequest shardingStrategy. - * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - */ - CreateReadSessionRequest.prototype.shardingStrategy = 0; - - /** - * Creates a new CreateReadSessionRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest instance - */ - CreateReadSessionRequest.create = function create(properties) { - return new CreateReadSessionRequest(properties); - }; - - /** - * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateReadSessionRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) - $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) - $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); - if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) - $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.format != null && Object.hasOwnProperty.call(message, "format")) - writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); - if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); - if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) - writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); - return writer; - }; - - /** - * Encodes the specified CreateReadSessionRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateReadSessionRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); - break; - } - case 6: { - message.parent = reader.string(); - break; - } - case 2: { - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); - break; - } - case 3: { - message.requestedStreams = reader.int32(); - break; - } - case 4: { - message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); - break; - } - case 5: { - message.format = reader.int32(); - break; - } - case 7: { - message.shardingStrategy = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a CreateReadSessionRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - CreateReadSessionRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.tableReference != null && message.hasOwnProperty("tableReference")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); - if (error) - return "tableReference." + error; - } - if (message.parent != null && message.hasOwnProperty("parent")) - if (!$util.isString(message.parent)) - return "parent: string expected"; - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); - if (error) - return "tableModifiers." + error; - } - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - if (!$util.isInteger(message.requestedStreams)) - return "requestedStreams: integer expected"; - if (message.readOptions != null && message.hasOwnProperty("readOptions")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify(message.readOptions); - if (error) - return "readOptions." + error; - } - if (message.format != null && message.hasOwnProperty("format")) - switch (message.format) { - default: - return "format: enum value expected"; - case 0: - case 1: - case 3: - break; - } - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - switch (message.shardingStrategy) { - default: - return "shardingStrategy: enum value expected"; - case 0: - case 1: - case 2: - break; - } - return null; - }; - - /** - * Creates a CreateReadSessionRequest message from a plain object. 
Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest - */ - CreateReadSessionRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); - if (object.tableReference != null) { - if (typeof object.tableReference !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableReference: object expected"); - message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); - } - if (object.parent != null) - message.parent = String(object.parent); - if (object.tableModifiers != null) { - if (typeof object.tableModifiers !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableModifiers: object expected"); - message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); - } - if (object.requestedStreams != null) - message.requestedStreams = object.requestedStreams | 0; - if (object.readOptions != null) { - if (typeof object.readOptions !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.readOptions: object expected"); - message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); - } - switch (object.format) { - default: - if (typeof object.format === "number") { - message.format = object.format; - break; - } - break; - case "DATA_FORMAT_UNSPECIFIED": - case 0: - message.format = 0; - break; - case "AVRO": - case 1: - message.format = 1; 
- break; - case "ARROW": - case 3: - message.format = 3; - break; - } - switch (object.shardingStrategy) { - default: - if (typeof object.shardingStrategy === "number") { - message.shardingStrategy = object.shardingStrategy; - break; - } - break; - case "SHARDING_STRATEGY_UNSPECIFIED": - case 0: - message.shardingStrategy = 0; - break; - case "LIQUID": - case 1: - message.shardingStrategy = 1; - break; - case "BALANCED": - case 2: - message.shardingStrategy = 2; - break; - } - return message; - }; - - /** - * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} message CreateReadSessionRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - CreateReadSessionRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.tableReference = null; - object.tableModifiers = null; - object.requestedStreams = 0; - object.readOptions = null; - object.format = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; - object.parent = ""; - object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; - } - if (message.tableReference != null && message.hasOwnProperty("tableReference")) - object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); - if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) - object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - object.requestedStreams = message.requestedStreams; - if (message.readOptions != null && message.hasOwnProperty("readOptions")) - object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); - if (message.format != null && message.hasOwnProperty("format")) - object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] === undefined ? message.format : $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; - if (message.parent != null && message.hasOwnProperty("parent")) - object.parent = message.parent; - if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) - object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; - return object; - }; - - /** - * Converts this CreateReadSessionRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @instance - * @returns {Object.} JSON object - */ - CreateReadSessionRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for CreateReadSessionRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest"; - }; - - return CreateReadSessionRequest; - })(); - - /** - * DataFormat enum. - * @name google.cloud.bigquery.storage.v1beta1.DataFormat - * @enum {number} - * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value - * @property {number} AVRO=1 AVRO value - * @property {number} ARROW=3 ARROW value - */ - v1beta1.DataFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; - values[valuesById[1] = "AVRO"] = 1; - values[valuesById[3] = "ARROW"] = 3; - return values; - })(); - - /** - * ShardingStrategy enum. 
- * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy - * @enum {number} - * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value - * @property {number} LIQUID=1 LIQUID value - * @property {number} BALANCED=2 BALANCED value - */ - v1beta1.ShardingStrategy = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; - values[valuesById[1] = "LIQUID"] = 1; - values[valuesById[2] = "BALANCED"] = 2; - return values; - })(); - - v1beta1.ReadRowsRequest = (function() { - - /** - * Properties of a ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadRowsRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null} [readPosition] ReadRowsRequest readPosition - */ - - /** - * Constructs a new ReadRowsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadRowsRequest. - * @implements IReadRowsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set - */ - function ReadRowsRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadRowsRequest readPosition. - * @member {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null|undefined} readPosition - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @instance - */ - ReadRowsRequest.prototype.readPosition = null; - - /** - * Creates a new ReadRowsRequest instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest instance - */ - ReadRowsRequest.create = function create(properties) { - return new ReadRowsRequest(properties); - }; - - /** - * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.readPosition != null && Object.hasOwnProperty.call(message, "readPosition")) - $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadRowsRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadRowsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.readPosition != null && message.hasOwnProperty("readPosition")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.verify(message.readPosition); - if (error) - return "readPosition." + error; - } - return null; - }; - - /** - * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest - */ - ReadRowsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); - if (object.readPosition != null) { - if (typeof object.readPosition !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.readPosition: object expected"); - message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.fromObject(object.readPosition); - } - return message; - }; - - /** - * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} message ReadRowsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadRowsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.readPosition = null; - if (message.readPosition != null && message.hasOwnProperty("readPosition")) - object.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.toObject(message.readPosition, options); - return object; - }; - - /** - * Converts this ReadRowsRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @instance - * @returns {Object.} JSON object - */ - ReadRowsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadRowsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsRequest"; - }; - - return ReadRowsRequest; - })(); - - v1beta1.StreamStatus = (function() { - - /** - * Properties of a StreamStatus. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IStreamStatus - * @property {number|Long|null} [estimatedRowCount] StreamStatus estimatedRowCount - * @property {number|null} [fractionConsumed] StreamStatus fractionConsumed - * @property {google.cloud.bigquery.storage.v1beta1.IProgress|null} [progress] StreamStatus progress - * @property {boolean|null} [isSplittable] StreamStatus isSplittable - */ - - /** - * Constructs a new StreamStatus. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a StreamStatus. - * @implements IStreamStatus - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set - */ - function StreamStatus(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * StreamStatus estimatedRowCount. 
- * @member {number|Long} estimatedRowCount - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @instance - */ - StreamStatus.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * StreamStatus fractionConsumed. - * @member {number} fractionConsumed - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @instance - */ - StreamStatus.prototype.fractionConsumed = 0; - - /** - * StreamStatus progress. - * @member {google.cloud.bigquery.storage.v1beta1.IProgress|null|undefined} progress - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @instance - */ - StreamStatus.prototype.progress = null; - - /** - * StreamStatus isSplittable. - * @member {boolean} isSplittable - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @instance - */ - StreamStatus.prototype.isSplittable = false; - - /** - * Creates a new StreamStatus instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus instance - */ - StreamStatus.create = function create(properties) { - return new StreamStatus(properties); - }; - - /** - * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamStatus.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); - if (message.fractionConsumed != null && Object.hasOwnProperty.call(message, "fractionConsumed")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); - if (message.isSplittable != null && Object.hasOwnProperty.call(message, "isSplittable")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); - if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) - $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StreamStatus.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a StreamStatus message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamStatus.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.estimatedRowCount = reader.int64(); - break; - } - case 2: { - message.fractionConsumed = reader.float(); - break; - } - case 4: { - message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); - break; - } - case 3: { - message.isSplittable = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a StreamStatus message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StreamStatus.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a StreamStatus message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - StreamStatus.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) - if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) - return "estimatedRowCount: integer|Long expected"; - if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) - if (typeof message.fractionConsumed !== "number") - return "fractionConsumed: number expected"; - if (message.progress != null && message.hasOwnProperty("progress")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Progress.verify(message.progress); - if (error) - return "progress." + error; - } - if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) - if (typeof message.isSplittable !== "boolean") - return "isSplittable: boolean expected"; - return null; - }; - - /** - * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus - */ - StreamStatus.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamStatus) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); - if (object.estimatedRowCount != null) - if ($util.Long) - (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; - else if (typeof object.estimatedRowCount === "string") - message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); - else if (typeof object.estimatedRowCount === "number") - message.estimatedRowCount = object.estimatedRowCount; - else if (typeof object.estimatedRowCount === "object") - message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); - if (object.fractionConsumed != null) - message.fractionConsumed = Number(object.fractionConsumed); - if (object.progress != null) { - if (typeof object.progress !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamStatus.progress: object expected"); - message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.fromObject(object.progress); - } - if (object.isSplittable != null) - message.isSplittable = Boolean(object.isSplittable); - return message; - }; - - /** - * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.StreamStatus} message StreamStatus - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - StreamStatus.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.estimatedRowCount = options.longs === String ? "0" : 0; - object.fractionConsumed = 0; - object.isSplittable = false; - object.progress = null; - } - if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) - if (typeof message.estimatedRowCount === "number") - object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; - else - object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; - if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) - object.fractionConsumed = options.json && !isFinite(message.fractionConsumed) ? String(message.fractionConsumed) : message.fractionConsumed; - if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) - object.isSplittable = message.isSplittable; - if (message.progress != null && message.hasOwnProperty("progress")) - object.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.toObject(message.progress, options); - return object; - }; - - /** - * Converts this StreamStatus to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @instance - * @returns {Object.} JSON object - */ - StreamStatus.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for StreamStatus - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - StreamStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamStatus"; - }; - - return StreamStatus; - })(); - - v1beta1.Progress = (function() { - - /** - * Properties of a Progress. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IProgress - * @property {number|null} [atResponseStart] Progress atResponseStart - * @property {number|null} [atResponseEnd] Progress atResponseEnd - */ - - /** - * Constructs a new Progress. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a Progress. - * @implements IProgress - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set - */ - function Progress(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Progress atResponseStart. - * @member {number} atResponseStart - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @instance - */ - Progress.prototype.atResponseStart = 0; - - /** - * Progress atResponseEnd. 
- * @member {number} atResponseEnd - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @instance - */ - Progress.prototype.atResponseEnd = 0; - - /** - * Creates a new Progress instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress instance - */ - Progress.create = function create(properties) { - return new Progress(properties); - }; - - /** - * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Progress.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) - writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); - if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); - return writer; - }; - - /** - * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Progress.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Progress message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Progress.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.atResponseStart = reader.float(); - break; - } - case 2: { - message.atResponseEnd = reader.float(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Progress message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Progress.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Progress message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Progress.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - if (typeof message.atResponseStart !== "number") - return "atResponseStart: number expected"; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - if (typeof message.atResponseEnd !== "number") - return "atResponseEnd: number expected"; - return null; - }; - - /** - * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress - */ - Progress.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Progress) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); - if (object.atResponseStart != null) - message.atResponseStart = Number(object.atResponseStart); - if (object.atResponseEnd != null) - message.atResponseEnd = Number(object.atResponseEnd); - return message; - }; - - /** - * Creates a plain object from a Progress message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {google.cloud.bigquery.storage.v1beta1.Progress} message Progress - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Progress.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.atResponseStart = 0; - object.atResponseEnd = 0; - } - if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) - object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; - if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) - object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; - return object; - }; - - /** - * Converts this Progress to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @instance - * @returns {Object.} JSON object - */ - Progress.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Progress - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.Progress - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Progress"; - }; - - return Progress; - })(); - - v1beta1.ThrottleStatus = (function() { - - /** - * Properties of a ThrottleStatus. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IThrottleStatus - * @property {number|null} [throttlePercent] ThrottleStatus throttlePercent - */ - - /** - * Constructs a new ThrottleStatus. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ThrottleStatus. - * @implements IThrottleStatus - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set - */ - function ThrottleStatus(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ThrottleStatus throttlePercent. - * @member {number} throttlePercent - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @instance - */ - ThrottleStatus.prototype.throttlePercent = 0; - - /** - * Creates a new ThrottleStatus instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus instance - */ - ThrottleStatus.create = function create(properties) { - return new ThrottleStatus(properties); - }; - - /** - * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ThrottleStatus.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); - return writer; - }; - - /** - * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ThrottleStatus.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ThrottleStatus message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ThrottleStatus.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.throttlePercent = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ThrottleStatus.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ThrottleStatus message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ThrottleStatus.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - if (!$util.isInteger(message.throttlePercent)) - return "throttlePercent: integer expected"; - return null; - }; - - /** - * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus - */ - ThrottleStatus.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); - if (object.throttlePercent != null) - message.throttlePercent = object.throttlePercent | 0; - return message; - }; - - /** - * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} message ThrottleStatus - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ThrottleStatus.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.throttlePercent = 0; - if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) - object.throttlePercent = message.throttlePercent; - return object; - }; - - /** - * Converts this ThrottleStatus to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @instance - * @returns {Object.} JSON object - */ - ThrottleStatus.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ThrottleStatus - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ThrottleStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ThrottleStatus"; - }; - - return ThrottleStatus; - })(); - - v1beta1.ReadRowsResponse = (function() { - - /** - * Properties of a ReadRowsResponse. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IReadRowsResponse - * @property {google.cloud.bigquery.storage.v1beta1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows - * @property {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch - * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount - * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status - * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus - */ - - /** - * Constructs a new ReadRowsResponse. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a ReadRowsResponse. - * @implements IReadRowsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set - */ - function ReadRowsResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReadRowsResponse avroRows. - * @member {google.cloud.bigquery.storage.v1beta1.IAvroRows|null|undefined} avroRows - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.avroRows = null; - - /** - * ReadRowsResponse arrowRecordBatch. - * @member {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null|undefined} arrowRecordBatch - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.arrowRecordBatch = null; - - /** - * ReadRowsResponse rowCount. - * @member {number|Long} rowCount - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * ReadRowsResponse status. 
- * @member {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null|undefined} status - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.status = null; - - /** - * ReadRowsResponse throttleStatus. - * @member {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null|undefined} throttleStatus - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - ReadRowsResponse.prototype.throttleStatus = null; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * ReadRowsResponse rows. - * @member {"avroRows"|"arrowRecordBatch"|undefined} rows - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - */ - Object.defineProperty(ReadRowsResponse.prototype, "rows", { - get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new ReadRowsResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse instance - */ - ReadRowsResponse.create = function create(properties) { - return new ReadRowsResponse(properties); - }; - - /** - * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.status != null && Object.hasOwnProperty.call(message, "status")) - $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) - $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) - $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.throttleStatus != null && Object.hasOwnProperty.call(message, "throttleStatus")) - $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) - writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); - return writer; - }; - - /** - * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 3: { - message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); - break; - } - case 4: { - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); - break; - } - case 6: { - message.rowCount = reader.int64(); - break; - } - case 2: { - message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); - break; - } - case 5: { - message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReadRowsResponse message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReadRowsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.verify(message.avroRows); - if (error) - return "avroRows." + error; - } - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - if (properties.rows === 1) - return "rows: multiple values"; - properties.rows = 1; - { - var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify(message.arrowRecordBatch); - if (error) - return "arrowRecordBatch." + error; - } - } - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) - return "rowCount: integer|Long expected"; - if (message.status != null && message.hasOwnProperty("status")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.verify(message.status); - if (error) - return "status." + error; - } - if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify(message.throttleStatus); - if (error) - return "throttleStatus." + error; - } - return null; - }; - - /** - * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse - */ - ReadRowsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); - if (object.avroRows != null) { - if (typeof object.avroRows !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroRows: object expected"); - message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.fromObject(object.avroRows); - } - if (object.arrowRecordBatch != null) { - if (typeof object.arrowRecordBatch !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowRecordBatch: object expected"); - message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); - } - if (object.rowCount != null) - if ($util.Long) - (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; - else if (typeof object.rowCount === "string") - message.rowCount = parseInt(object.rowCount, 10); - else if (typeof object.rowCount === "number") - message.rowCount = object.rowCount; - else if (typeof object.rowCount === "object") - message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); - if (object.status != null) { - if (typeof object.status !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status: object expected"); - message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.fromObject(object.status); - } - if (object.throttleStatus != null) { - if (typeof object.throttleStatus !== "object") - throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); - message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); - } - return message; - }; - - /** - * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. - * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} message ReadRowsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReadRowsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.status = null; - object.throttleStatus = null; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.rowCount = options.longs === String ? 
"0" : 0; - } - if (message.status != null && message.hasOwnProperty("status")) - object.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.toObject(message.status, options); - if (message.avroRows != null && message.hasOwnProperty("avroRows")) { - object.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.toObject(message.avroRows, options); - if (options.oneofs) - object.rows = "avroRows"; - } - if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { - object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); - if (options.oneofs) - object.rows = "arrowRecordBatch"; - } - if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) - object.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.toObject(message.throttleStatus, options); - if (message.rowCount != null && message.hasOwnProperty("rowCount")) - if (typeof message.rowCount === "number") - object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; - else - object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; - return object; - }; - - /** - * Converts this ReadRowsResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @instance - * @returns {Object.} JSON object - */ - ReadRowsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReadRowsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"; - }; - - return ReadRowsResponse; - })(); - - v1beta1.BatchCreateReadSessionStreamsRequest = (function() { - - /** - * Properties of a BatchCreateReadSessionStreamsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IBatchCreateReadSessionStreamsRequest - * @property {google.cloud.bigquery.storage.v1beta1.IReadSession|null} [session] BatchCreateReadSessionStreamsRequest session - * @property {number|null} [requestedStreams] BatchCreateReadSessionStreamsRequest requestedStreams - */ - - /** - * Constructs a new BatchCreateReadSessionStreamsRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BatchCreateReadSessionStreamsRequest. 
- * @implements IBatchCreateReadSessionStreamsRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set - */ - function BatchCreateReadSessionStreamsRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BatchCreateReadSessionStreamsRequest session. - * @member {google.cloud.bigquery.storage.v1beta1.IReadSession|null|undefined} session - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @instance - */ - BatchCreateReadSessionStreamsRequest.prototype.session = null; - - /** - * BatchCreateReadSessionStreamsRequest requestedStreams. - * @member {number} requestedStreams - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @instance - */ - BatchCreateReadSessionStreamsRequest.prototype.requestedStreams = 0; - - /** - * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest instance - */ - BatchCreateReadSessionStreamsRequest.create = function create(properties) { - return new BatchCreateReadSessionStreamsRequest(properties); - }; - - /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.session != null && Object.hasOwnProperty.call(message, "session")) - $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); - return writer; - }; - - /** - * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCreateReadSessionStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); - break; - } - case 2: { - message.requestedStreams = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCreateReadSessionStreamsRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BatchCreateReadSessionStreamsRequest message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BatchCreateReadSessionStreamsRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.session != null && message.hasOwnProperty("session")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.verify(message.session); - if (error) - return "session." + error; - } - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - if (!$util.isInteger(message.requestedStreams)) - return "requestedStreams: integer expected"; - return null; - }; - - /** - * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest - */ - BatchCreateReadSessionStreamsRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); - if (object.session != null) { - if (typeof object.session !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session: object expected"); - message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.fromObject(object.session); - } - if (object.requestedStreams != null) - message.requestedStreams = object.requestedStreams | 0; - return message; - }; - - /** - * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BatchCreateReadSessionStreamsRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.session = null; - object.requestedStreams = 0; - } - if (message.session != null && message.hasOwnProperty("session")) - object.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.toObject(message.session, options); - if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) - object.requestedStreams = message.requestedStreams; - return object; - }; - - /** - * Converts this BatchCreateReadSessionStreamsRequest to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @instance - * @returns {Object.} JSON object - */ - BatchCreateReadSessionStreamsRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BatchCreateReadSessionStreamsRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BatchCreateReadSessionStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest"; - }; - - return BatchCreateReadSessionStreamsRequest; - })(); - - v1beta1.BatchCreateReadSessionStreamsResponse = (function() { - - /** - * Properties of a BatchCreateReadSessionStreamsResponse. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IBatchCreateReadSessionStreamsResponse - * @property {Array.|null} [streams] BatchCreateReadSessionStreamsResponse streams - */ - - /** - * Constructs a new BatchCreateReadSessionStreamsResponse. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a BatchCreateReadSessionStreamsResponse. 
- * @implements IBatchCreateReadSessionStreamsResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set - */ - function BatchCreateReadSessionStreamsResponse(properties) { - this.streams = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BatchCreateReadSessionStreamsResponse streams. - * @member {Array.} streams - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @instance - */ - BatchCreateReadSessionStreamsResponse.prototype.streams = $util.emptyArray; - - /** - * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse instance - */ - BatchCreateReadSessionStreamsResponse.create = function create(properties) { - return new BatchCreateReadSessionStreamsResponse(properties); - }; - - /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCreateReadSessionStreamsResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.streams != null && message.streams.length) - for (var i = 0; i < message.streams.length; ++i) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BatchCreateReadSessionStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.streams && message.streams.length)) - message.streams = []; - message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BatchCreateReadSessionStreamsResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BatchCreateReadSessionStreamsResponse message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BatchCreateReadSessionStreamsResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.streams != null && message.hasOwnProperty("streams")) { - if (!Array.isArray(message.streams)) - return "streams: array expected"; - for (var i = 0; i < message.streams.length; ++i) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); - if (error) - return "streams." + error; - } - } - return null; - }; - - /** - * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse - */ - BatchCreateReadSessionStreamsResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); - if (object.streams) { - if (!Array.isArray(object.streams)) - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: array expected"); - message.streams = []; - for (var i = 0; i < object.streams.length; ++i) { - if (typeof object.streams[i] !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: object expected"); - message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BatchCreateReadSessionStreamsResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.streams = []; - if (message.streams && message.streams.length) { - object.streams = []; - for (var j = 0; j < message.streams.length; ++j) - object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); - } - return object; - }; - - /** - * Converts this BatchCreateReadSessionStreamsResponse to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @instance - * @returns {Object.} JSON object - */ - BatchCreateReadSessionStreamsResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BatchCreateReadSessionStreamsResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BatchCreateReadSessionStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"; - }; - - return BatchCreateReadSessionStreamsResponse; - })(); - - v1beta1.FinalizeStreamRequest = (function() { - - /** - * Properties of a 
FinalizeStreamRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface IFinalizeStreamRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] FinalizeStreamRequest stream - */ - - /** - * Constructs a new FinalizeStreamRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a FinalizeStreamRequest. - * @implements IFinalizeStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set - */ - function FinalizeStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FinalizeStreamRequest stream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @instance - */ - FinalizeStreamRequest.prototype.stream = null; - - /** - * Creates a new FinalizeStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest instance - */ - FinalizeStreamRequest.create = function create(properties) { - return new FinalizeStreamRequest(properties); - }; - - /** - * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FinalizeStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FinalizeStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FinalizeStreamRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FinalizeStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.stream != null && message.hasOwnProperty("stream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); - if (error) - return "stream." + error; - } - return null; - }; - - /** - * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest - */ - FinalizeStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); - if (object.stream != null) { - if (typeof object.stream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream: object expected"); - message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); - } - return message; - }; - - /** - * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} message FinalizeStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FinalizeStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.stream = null; - if (message.stream != null && message.hasOwnProperty("stream")) - object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); - return object; - }; - - /** - * Converts this FinalizeStreamRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @instance - * @returns {Object.} JSON object - */ - FinalizeStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FinalizeStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FinalizeStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest"; - }; - - return FinalizeStreamRequest; - })(); - - v1beta1.SplitReadStreamRequest = (function() { - - /** - * Properties of a SplitReadStreamRequest. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ISplitReadStreamRequest - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [originalStream] SplitReadStreamRequest originalStream - * @property {number|null} [fraction] SplitReadStreamRequest fraction - */ - - /** - * Constructs a new SplitReadStreamRequest. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a SplitReadStreamRequest. - * @implements ISplitReadStreamRequest - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set - */ - function SplitReadStreamRequest(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SplitReadStreamRequest originalStream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} originalStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @instance - */ - SplitReadStreamRequest.prototype.originalStream = null; - - /** - * SplitReadStreamRequest fraction. - * @member {number} fraction - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @instance - */ - SplitReadStreamRequest.prototype.fraction = 0; - - /** - * Creates a new SplitReadStreamRequest instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest instance - */ - SplitReadStreamRequest.create = function create(properties) { - return new SplitReadStreamRequest(properties); - }; - - /** - * Encodes the specified SplitReadStreamRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamRequest.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.originalStream != null && Object.hasOwnProperty.call(message, "originalStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) - writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); - return writer; - }; - - /** - * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamRequest.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - } - case 2: { - message.fraction = reader.float(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SplitReadStreamRequest message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SplitReadStreamRequest.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.originalStream != null && message.hasOwnProperty("originalStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.originalStream); - if (error) - return "originalStream." + error; - } - if (message.fraction != null && message.hasOwnProperty("fraction")) - if (typeof message.fraction !== "number") - return "fraction: number expected"; - return null; - }; - - /** - * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest - */ - SplitReadStreamRequest.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); - if (object.originalStream != null) { - if (typeof object.originalStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.originalStream: object expected"); - message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.originalStream); - } - if (object.fraction != null) - message.fraction = Number(object.fraction); - return message; - }; - - /** - * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} message SplitReadStreamRequest - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SplitReadStreamRequest.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.originalStream = null; - object.fraction = 0; - } - if (message.originalStream != null && message.hasOwnProperty("originalStream")) - object.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.originalStream, options); - if (message.fraction != null && message.hasOwnProperty("fraction")) - object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; - return object; - }; - - /** - * Converts this SplitReadStreamRequest to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @instance - * @returns {Object.} JSON object - */ - SplitReadStreamRequest.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SplitReadStreamRequest - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest"; - }; - - return SplitReadStreamRequest; - })(); - - v1beta1.SplitReadStreamResponse = (function() { - - /** - * Properties of a 
SplitReadStreamResponse. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ISplitReadStreamResponse - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [primaryStream] SplitReadStreamResponse primaryStream - * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [remainderStream] SplitReadStreamResponse remainderStream - */ - - /** - * Constructs a new SplitReadStreamResponse. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a SplitReadStreamResponse. - * @implements ISplitReadStreamResponse - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set - */ - function SplitReadStreamResponse(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SplitReadStreamResponse primaryStream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} primaryStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @instance - */ - SplitReadStreamResponse.prototype.primaryStream = null; - - /** - * SplitReadStreamResponse remainderStream. - * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} remainderStream - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @instance - */ - SplitReadStreamResponse.prototype.remainderStream = null; - - /** - * Creates a new SplitReadStreamResponse instance using the specified properties. 
- * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse instance - */ - SplitReadStreamResponse.create = function create(properties) { - return new SplitReadStreamResponse(properties); - }; - - /** - * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. - * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamResponse.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) - $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer. - * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamResponse.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - } - case 2: { - message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SplitReadStreamResponse message. - * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SplitReadStreamResponse.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.primaryStream); - if (error) - return "primaryStream." + error; - } - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { - var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.remainderStream); - if (error) - return "remainderStream." + error; - } - return null; - }; - - /** - * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse - */ - SplitReadStreamResponse.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); - if (object.primaryStream != null) { - if (typeof object.primaryStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primaryStream: object expected"); - message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.primaryStream); - } - if (object.remainderStream != null) { - if (typeof object.remainderStream !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainderStream: object expected"); - message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.remainderStream); - } - return message; - }; - - /** - * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} message SplitReadStreamResponse - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SplitReadStreamResponse.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.primaryStream = null; - object.remainderStream = null; - } - if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) - object.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.primaryStream, options); - if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) - object.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.remainderStream, options); - return object; - }; - - /** - * Converts this SplitReadStreamResponse to JSON. 
- * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @instance - * @returns {Object.} JSON object - */ - SplitReadStreamResponse.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SplitReadStreamResponse - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"; - }; - - return SplitReadStreamResponse; - })(); - - v1beta1.TableReference = (function() { - - /** - * Properties of a TableReference. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableReference - * @property {string|null} [projectId] TableReference projectId - * @property {string|null} [datasetId] TableReference datasetId - * @property {string|null} [tableId] TableReference tableId - */ - - /** - * Constructs a new TableReference. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableReference. - * @implements ITableReference - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set - */ - function TableReference(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableReference projectId. 
- * @member {string} projectId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @instance - */ - TableReference.prototype.projectId = ""; - - /** - * TableReference datasetId. - * @member {string} datasetId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @instance - */ - TableReference.prototype.datasetId = ""; - - /** - * TableReference tableId. - * @member {string} tableId - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @instance - */ - TableReference.prototype.tableId = ""; - - /** - * Creates a new TableReference instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference instance - */ - TableReference.create = function create(properties) { - return new TableReference(properties); - }; - - /** - * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReference.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.projectId != null && Object.hasOwnProperty.call(message, "projectId")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); - if (message.datasetId != null && Object.hasOwnProperty.call(message, "datasetId")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.datasetId); - if (message.tableId != null && Object.hasOwnProperty.call(message, "tableId")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); - return writer; - }; - - /** - * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableReference.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableReference message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReference.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.projectId = reader.string(); - break; - } - case 2: { - message.datasetId = reader.string(); - break; - } - case 3: { - message.tableId = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableReference message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableReference.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableReference message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableReference.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.projectId != null && message.hasOwnProperty("projectId")) - if (!$util.isString(message.projectId)) - return "projectId: string expected"; - if (message.datasetId != null && message.hasOwnProperty("datasetId")) - if (!$util.isString(message.datasetId)) - return "datasetId: string expected"; - if (message.tableId != null && message.hasOwnProperty("tableId")) - if (!$util.isString(message.tableId)) - return "tableId: string expected"; - return null; - }; - - /** - * Creates a TableReference message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference - */ - TableReference.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReference) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); - if (object.projectId != null) - message.projectId = String(object.projectId); - if (object.datasetId != null) - message.datasetId = String(object.datasetId); - if (object.tableId != null) - message.tableId = String(object.tableId); - return message; - }; - - /** - * Creates a plain object from a TableReference message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableReference} message TableReference - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableReference.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.projectId = ""; - object.datasetId = ""; - object.tableId = ""; - } - if (message.projectId != null && message.hasOwnProperty("projectId")) - object.projectId = message.projectId; - if (message.datasetId != null && message.hasOwnProperty("datasetId")) - object.datasetId = message.datasetId; - if (message.tableId != null && message.hasOwnProperty("tableId")) - object.tableId = message.tableId; - return object; - }; - - /** - * Converts this TableReference to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @instance - * @returns {Object.} JSON object - */ - TableReference.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableReference - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.TableReference - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableReference"; - }; - - return TableReference; - })(); - - v1beta1.TableModifiers = (function() { - - /** - * Properties of a TableModifiers. 
- * @memberof google.cloud.bigquery.storage.v1beta1 - * @interface ITableModifiers - * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime - */ - - /** - * Constructs a new TableModifiers. - * @memberof google.cloud.bigquery.storage.v1beta1 - * @classdesc Represents a TableModifiers. - * @implements ITableModifiers - * @constructor - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set - */ - function TableModifiers(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * TableModifiers snapshotTime. - * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @instance - */ - TableModifiers.prototype.snapshotTime = null; - - /** - * Creates a new TableModifiers instance using the specified properties. - * @function create - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers instance - */ - TableModifiers.create = function create(properties) { - return new TableModifiers(properties); - }; - - /** - * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. 
- * @function encode - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) - $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. - * @function encodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a TableModifiers message from the specified reader or buffer. 
- * @function decode - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a TableModifiers message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - TableModifiers.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a TableModifiers message. 
- * @function verify - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - TableModifiers.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { - var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); - if (error) - return "snapshotTime." + error; - } - return null; - }; - - /** - * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {Object.} object Plain object - * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers - */ - TableModifiers.fromObject = function fromObject(object) { - if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableModifiers) - return object; - var message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); - if (object.snapshotTime != null) { - if (typeof object.snapshotTime !== "object") - throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshotTime: object expected"); - message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); - } - return message; - }; - - /** - * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} message TableModifiers - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - TableModifiers.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.snapshotTime = null; - if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) - object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); - return object; - }; - - /** - * Converts this TableModifiers to JSON. - * @function toJSON - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @instance - * @returns {Object.} JSON object - */ - TableModifiers.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for TableModifiers - * @function getTypeUrl - * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableModifiers"; - }; - - return TableModifiers; - })(); - - return v1beta1; - })(); - - return storage; - })(); - - return bigquery; - })(); - - return cloud; - })(); - - google.protobuf = (function() { - - /** - * Namespace protobuf. - * @memberof google - * @namespace - */ - var protobuf = {}; - - protobuf.FileDescriptorSet = (function() { - - /** - * Properties of a FileDescriptorSet. 
- * @memberof google.protobuf - * @interface IFileDescriptorSet - * @property {Array.|null} [file] FileDescriptorSet file - */ - - /** - * Constructs a new FileDescriptorSet. - * @memberof google.protobuf - * @classdesc Represents a FileDescriptorSet. - * @implements IFileDescriptorSet - * @constructor - * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set - */ - function FileDescriptorSet(properties) { - this.file = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FileDescriptorSet file. - * @member {Array.} file - * @memberof google.protobuf.FileDescriptorSet - * @instance - */ - FileDescriptorSet.prototype.file = $util.emptyArray; - - /** - * Creates a new FileDescriptorSet instance using the specified properties. - * @function create - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet instance - */ - FileDescriptorSet.create = function create(properties) { - return new FileDescriptorSet(properties); - }; - - /** - * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileDescriptorSet.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.file != null && message.file.length) - for (var i = 0; i < message.file.length; ++i) - $root.google.protobuf.FileDescriptorProto.encode(message.file[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileDescriptorSet.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FileDescriptorSet message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileDescriptorSet.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.file && message.file.length)) - message.file = []; - message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileDescriptorSet.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FileDescriptorSet message. - * @function verify - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FileDescriptorSet.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.file != null && message.hasOwnProperty("file")) { - if (!Array.isArray(message.file)) - return "file: array expected"; - for (var i = 0; i < message.file.length; ++i) { - var error = $root.google.protobuf.FileDescriptorProto.verify(message.file[i]); - if (error) - return "file." + error; - } - } - return null; - }; - - /** - * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet - */ - FileDescriptorSet.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileDescriptorSet) - return object; - var message = new $root.google.protobuf.FileDescriptorSet(); - if (object.file) { - if (!Array.isArray(object.file)) - throw TypeError(".google.protobuf.FileDescriptorSet.file: array expected"); - message.file = []; - for (var i = 0; i < object.file.length; ++i) { - if (typeof object.file[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorSet.file: object expected"); - message.file[i] = $root.google.protobuf.FileDescriptorProto.fromObject(object.file[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {google.protobuf.FileDescriptorSet} message FileDescriptorSet - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FileDescriptorSet.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.file = []; - if (message.file && message.file.length) { - object.file = []; - for (var j = 0; j < message.file.length; ++j) - object.file[j] = $root.google.protobuf.FileDescriptorProto.toObject(message.file[j], options); - } - return object; - }; - - /** - * Converts this FileDescriptorSet to JSON. 
- * @function toJSON - * @memberof google.protobuf.FileDescriptorSet - * @instance - * @returns {Object.} JSON object - */ - FileDescriptorSet.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FileDescriptorSet - * @function getTypeUrl - * @memberof google.protobuf.FileDescriptorSet - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FileDescriptorSet.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FileDescriptorSet"; - }; - - return FileDescriptorSet; - })(); - - protobuf.FileDescriptorProto = (function() { - - /** - * Properties of a FileDescriptorProto. - * @memberof google.protobuf - * @interface IFileDescriptorProto - * @property {string|null} [name] FileDescriptorProto name - * @property {string|null} ["package"] FileDescriptorProto package - * @property {Array.|null} [dependency] FileDescriptorProto dependency - * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency - * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency - * @property {Array.|null} [messageType] FileDescriptorProto messageType - * @property {Array.|null} [enumType] FileDescriptorProto enumType - * @property {Array.|null} [service] FileDescriptorProto service - * @property {Array.|null} [extension] FileDescriptorProto extension - * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options - * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo - * @property {string|null} [syntax] FileDescriptorProto syntax - * @property {string|null} [edition] FileDescriptorProto edition - */ - - /** - * Constructs a new FileDescriptorProto. 
- * @memberof google.protobuf - * @classdesc Represents a FileDescriptorProto. - * @implements IFileDescriptorProto - * @constructor - * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set - */ - function FileDescriptorProto(properties) { - this.dependency = []; - this.publicDependency = []; - this.weakDependency = []; - this.messageType = []; - this.enumType = []; - this.service = []; - this.extension = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FileDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.name = ""; - - /** - * FileDescriptorProto package. - * @member {string} package - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype["package"] = ""; - - /** - * FileDescriptorProto dependency. - * @member {Array.} dependency - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.dependency = $util.emptyArray; - - /** - * FileDescriptorProto publicDependency. - * @member {Array.} publicDependency - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.publicDependency = $util.emptyArray; - - /** - * FileDescriptorProto weakDependency. - * @member {Array.} weakDependency - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.weakDependency = $util.emptyArray; - - /** - * FileDescriptorProto messageType. - * @member {Array.} messageType - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.messageType = $util.emptyArray; - - /** - * FileDescriptorProto enumType. 
- * @member {Array.} enumType - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.enumType = $util.emptyArray; - - /** - * FileDescriptorProto service. - * @member {Array.} service - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.service = $util.emptyArray; - - /** - * FileDescriptorProto extension. - * @member {Array.} extension - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.extension = $util.emptyArray; - - /** - * FileDescriptorProto options. - * @member {google.protobuf.IFileOptions|null|undefined} options - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.options = null; - - /** - * FileDescriptorProto sourceCodeInfo. - * @member {google.protobuf.ISourceCodeInfo|null|undefined} sourceCodeInfo - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.sourceCodeInfo = null; - - /** - * FileDescriptorProto syntax. - * @member {string} syntax - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.syntax = ""; - - /** - * FileDescriptorProto edition. - * @member {string} edition - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.edition = ""; - - /** - * Creates a new FileDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto instance - */ - FileDescriptorProto.create = function create(properties) { - return new FileDescriptorProto(properties); - }; - - /** - * Encodes the specified FileDescriptorProto message. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); - if (message.dependency != null && message.dependency.length) - for (var i = 0; i < message.dependency.length; ++i) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.dependency[i]); - if (message.messageType != null && message.messageType.length) - for (var i = 0; i < message.messageType.length; ++i) - $root.google.protobuf.DescriptorProto.encode(message.messageType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.enumType != null && message.enumType.length) - for (var i = 0; i < message.enumType.length; ++i) - $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.service != null && message.service.length) - for (var i = 0; i < message.service.length; ++i) - $root.google.protobuf.ServiceDescriptorProto.encode(message.service[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.extension != null && message.extension.length) - for (var i = 0; i < message.extension.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) - $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.publicDependency != null && message.publicDependency.length) - for (var i = 0; i < message.publicDependency.length; ++i) - writer.uint32(/* id 10, wireType 0 =*/80).int32(message.publicDependency[i]); - if (message.weakDependency != null && message.weakDependency.length) - for (var i = 0; i < message.weakDependency.length; ++i) - writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); - if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) - writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); - if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) - writer.uint32(/* id 13, wireType 2 =*/106).string(message.edition); - return writer; - }; - - /** - * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FileDescriptorProto message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message["package"] = reader.string(); - break; - } - case 3: { - if (!(message.dependency && message.dependency.length)) - message.dependency = []; - message.dependency.push(reader.string()); - break; - } - case 10: { - if (!(message.publicDependency && message.publicDependency.length)) - message.publicDependency = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.publicDependency.push(reader.int32()); - } else - message.publicDependency.push(reader.int32()); - break; - } - case 11: { - if (!(message.weakDependency && message.weakDependency.length)) - message.weakDependency = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.weakDependency.push(reader.int32()); - } else - message.weakDependency.push(reader.int32()); - break; - } - case 4: { - if (!(message.messageType && message.messageType.length)) - message.messageType = []; - message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - } - case 5: { - if (!(message.enumType && message.enumType.length)) 
- message.enumType = []; - message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 6: { - if (!(message.service && message.service.length)) - message.service = []; - message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 7: { - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 8: { - message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); - break; - } - case 9: { - message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); - break; - } - case 12: { - message.syntax = reader.string(); - break; - } - case 13: { - message.edition = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FileDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FileDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message["package"] != null && message.hasOwnProperty("package")) - if (!$util.isString(message["package"])) - return "package: string expected"; - if (message.dependency != null && message.hasOwnProperty("dependency")) { - if (!Array.isArray(message.dependency)) - return "dependency: array expected"; - for (var i = 0; i < message.dependency.length; ++i) - if (!$util.isString(message.dependency[i])) - return "dependency: string[] expected"; - } - if (message.publicDependency != null && message.hasOwnProperty("publicDependency")) { - if (!Array.isArray(message.publicDependency)) - return "publicDependency: array expected"; - for (var i = 0; i < message.publicDependency.length; ++i) - if (!$util.isInteger(message.publicDependency[i])) - return "publicDependency: integer[] expected"; - } - if (message.weakDependency != null && message.hasOwnProperty("weakDependency")) { - if (!Array.isArray(message.weakDependency)) - return "weakDependency: array expected"; - for (var i = 0; i < message.weakDependency.length; ++i) - if (!$util.isInteger(message.weakDependency[i])) - return "weakDependency: integer[] expected"; - } - if (message.messageType != null && message.hasOwnProperty("messageType")) { - if (!Array.isArray(message.messageType)) - return "messageType: array expected"; - for (var i = 0; i < message.messageType.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.verify(message.messageType[i]); - if (error) - return "messageType." 
+ error; - } - } - if (message.enumType != null && message.hasOwnProperty("enumType")) { - if (!Array.isArray(message.enumType)) - return "enumType: array expected"; - for (var i = 0; i < message.enumType.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); - if (error) - return "enumType." + error; - } - } - if (message.service != null && message.hasOwnProperty("service")) { - if (!Array.isArray(message.service)) - return "service: array expected"; - for (var i = 0; i < message.service.length; ++i) { - var error = $root.google.protobuf.ServiceDescriptorProto.verify(message.service[i]); - if (error) - return "service." + error; - } - } - if (message.extension != null && message.hasOwnProperty("extension")) { - if (!Array.isArray(message.extension)) - return "extension: array expected"; - for (var i = 0; i < message.extension.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); - if (error) - return "extension." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.FileOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) { - var error = $root.google.protobuf.SourceCodeInfo.verify(message.sourceCodeInfo); - if (error) - return "sourceCodeInfo." + error; - } - if (message.syntax != null && message.hasOwnProperty("syntax")) - if (!$util.isString(message.syntax)) - return "syntax: string expected"; - if (message.edition != null && message.hasOwnProperty("edition")) - if (!$util.isString(message.edition)) - return "edition: string expected"; - return null; - }; - - /** - * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto - */ - FileDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileDescriptorProto) - return object; - var message = new $root.google.protobuf.FileDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object["package"] != null) - message["package"] = String(object["package"]); - if (object.dependency) { - if (!Array.isArray(object.dependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.dependency: array expected"); - message.dependency = []; - for (var i = 0; i < object.dependency.length; ++i) - message.dependency[i] = String(object.dependency[i]); - } - if (object.publicDependency) { - if (!Array.isArray(object.publicDependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.publicDependency: array expected"); - message.publicDependency = []; - for (var i = 0; i < object.publicDependency.length; ++i) - message.publicDependency[i] = object.publicDependency[i] | 0; - } - if (object.weakDependency) { - if (!Array.isArray(object.weakDependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.weakDependency: array expected"); - message.weakDependency = []; - for (var i = 0; i < object.weakDependency.length; ++i) - message.weakDependency[i] = object.weakDependency[i] | 0; - } - if (object.messageType) { - if (!Array.isArray(object.messageType)) - throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); - message.messageType = []; - for (var i = 0; i < object.messageType.length; ++i) { - if (typeof object.messageType[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.messageType: object expected"); - message.messageType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.messageType[i]); - } - } - 
if (object.enumType) { - if (!Array.isArray(object.enumType)) - throw TypeError(".google.protobuf.FileDescriptorProto.enumType: array expected"); - message.enumType = []; - for (var i = 0; i < object.enumType.length; ++i) { - if (typeof object.enumType[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.enumType: object expected"); - message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); - } - } - if (object.service) { - if (!Array.isArray(object.service)) - throw TypeError(".google.protobuf.FileDescriptorProto.service: array expected"); - message.service = []; - for (var i = 0; i < object.service.length; ++i) { - if (typeof object.service[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.service: object expected"); - message.service[i] = $root.google.protobuf.ServiceDescriptorProto.fromObject(object.service[i]); - } - } - if (object.extension) { - if (!Array.isArray(object.extension)) - throw TypeError(".google.protobuf.FileDescriptorProto.extension: array expected"); - message.extension = []; - for (var i = 0; i < object.extension.length; ++i) { - if (typeof object.extension[i] !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.extension: object expected"); - message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.FileOptions.fromObject(object.options); - } - if (object.sourceCodeInfo != null) { - if (typeof object.sourceCodeInfo !== "object") - throw TypeError(".google.protobuf.FileDescriptorProto.sourceCodeInfo: object expected"); - message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.fromObject(object.sourceCodeInfo); - } - if (object.syntax != null) - message.syntax = String(object.syntax); - if 
(object.edition != null) - message.edition = String(object.edition); - return message; - }; - - /** - * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {google.protobuf.FileDescriptorProto} message FileDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FileDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.dependency = []; - object.messageType = []; - object.enumType = []; - object.service = []; - object.extension = []; - object.publicDependency = []; - object.weakDependency = []; - } - if (options.defaults) { - object.name = ""; - object["package"] = ""; - object.options = null; - object.sourceCodeInfo = null; - object.syntax = ""; - object.edition = ""; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message["package"] != null && message.hasOwnProperty("package")) - object["package"] = message["package"]; - if (message.dependency && message.dependency.length) { - object.dependency = []; - for (var j = 0; j < message.dependency.length; ++j) - object.dependency[j] = message.dependency[j]; - } - if (message.messageType && message.messageType.length) { - object.messageType = []; - for (var j = 0; j < message.messageType.length; ++j) - object.messageType[j] = $root.google.protobuf.DescriptorProto.toObject(message.messageType[j], options); - } - if (message.enumType && message.enumType.length) { - object.enumType = []; - for (var j = 0; j < message.enumType.length; ++j) - object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); - } - if (message.service && message.service.length) { - object.service = []; - for (var j = 0; j 
< message.service.length; ++j) - object.service[j] = $root.google.protobuf.ServiceDescriptorProto.toObject(message.service[j], options); - } - if (message.extension && message.extension.length) { - object.extension = []; - for (var j = 0; j < message.extension.length; ++j) - object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.FileOptions.toObject(message.options, options); - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) - object.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.toObject(message.sourceCodeInfo, options); - if (message.publicDependency && message.publicDependency.length) { - object.publicDependency = []; - for (var j = 0; j < message.publicDependency.length; ++j) - object.publicDependency[j] = message.publicDependency[j]; - } - if (message.weakDependency && message.weakDependency.length) { - object.weakDependency = []; - for (var j = 0; j < message.weakDependency.length; ++j) - object.weakDependency[j] = message.weakDependency[j]; - } - if (message.syntax != null && message.hasOwnProperty("syntax")) - object.syntax = message.syntax; - if (message.edition != null && message.hasOwnProperty("edition")) - object.edition = message.edition; - return object; - }; - - /** - * Converts this FileDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.FileDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - FileDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FileDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.FileDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FileDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FileDescriptorProto"; - }; - - return FileDescriptorProto; - })(); - - protobuf.DescriptorProto = (function() { - - /** - * Properties of a DescriptorProto. - * @memberof google.protobuf - * @interface IDescriptorProto - * @property {string|null} [name] DescriptorProto name - * @property {Array.|null} [field] DescriptorProto field - * @property {Array.|null} [extension] DescriptorProto extension - * @property {Array.|null} [nestedType] DescriptorProto nestedType - * @property {Array.|null} [enumType] DescriptorProto enumType - * @property {Array.|null} [extensionRange] DescriptorProto extensionRange - * @property {Array.|null} [oneofDecl] DescriptorProto oneofDecl - * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options - * @property {Array.|null} [reservedRange] DescriptorProto reservedRange - * @property {Array.|null} [reservedName] DescriptorProto reservedName - */ - - /** - * Constructs a new DescriptorProto. - * @memberof google.protobuf - * @classdesc Represents a DescriptorProto. 
- * @implements IDescriptorProto - * @constructor - * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set - */ - function DescriptorProto(properties) { - this.field = []; - this.extension = []; - this.nestedType = []; - this.enumType = []; - this.extensionRange = []; - this.oneofDecl = []; - this.reservedRange = []; - this.reservedName = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * DescriptorProto name. - * @member {string} name - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.name = ""; - - /** - * DescriptorProto field. - * @member {Array.} field - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.field = $util.emptyArray; - - /** - * DescriptorProto extension. - * @member {Array.} extension - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.extension = $util.emptyArray; - - /** - * DescriptorProto nestedType. - * @member {Array.} nestedType - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.nestedType = $util.emptyArray; - - /** - * DescriptorProto enumType. - * @member {Array.} enumType - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.enumType = $util.emptyArray; - - /** - * DescriptorProto extensionRange. - * @member {Array.} extensionRange - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.extensionRange = $util.emptyArray; - - /** - * DescriptorProto oneofDecl. - * @member {Array.} oneofDecl - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.oneofDecl = $util.emptyArray; - - /** - * DescriptorProto options. 
- * @member {google.protobuf.IMessageOptions|null|undefined} options - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.options = null; - - /** - * DescriptorProto reservedRange. - * @member {Array.} reservedRange - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.reservedRange = $util.emptyArray; - - /** - * DescriptorProto reservedName. - * @member {Array.} reservedName - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.reservedName = $util.emptyArray; - - /** - * Creates a new DescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.DescriptorProto - * @static - * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto} DescriptorProto instance - */ - DescriptorProto.create = function create(properties) { - return new DescriptorProto(properties); - }; - - /** - * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.DescriptorProto - * @static - * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - DescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.field != null && message.field.length) - for (var i = 0; i < message.field.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.field[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.nestedType != null && message.nestedType.length) - for (var i = 0; i < message.nestedType.length; ++i) - $root.google.protobuf.DescriptorProto.encode(message.nestedType[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.enumType != null && message.enumType.length) - for (var i = 0; i < message.enumType.length; ++i) - $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.extensionRange != null && message.extensionRange.length) - for (var i = 0; i < message.extensionRange.length; ++i) - $root.google.protobuf.DescriptorProto.ExtensionRange.encode(message.extensionRange[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.extension != null && message.extension.length) - for (var i = 0; i < message.extension.length; ++i) - $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if 
(message.oneofDecl != null && message.oneofDecl.length) - for (var i = 0; i < message.oneofDecl.length; ++i) - $root.google.protobuf.OneofDescriptorProto.encode(message.oneofDecl[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.reservedRange != null && message.reservedRange.length) - for (var i = 0; i < message.reservedRange.length; ++i) - $root.google.protobuf.DescriptorProto.ReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.reservedName != null && message.reservedName.length) - for (var i = 0; i < message.reservedName.length; ++i) - writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); - return writer; - }; - - /** - * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.DescriptorProto - * @static - * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - DescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a DescriptorProto message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.DescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto} DescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - DescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - if (!(message.field && message.field.length)) - message.field = []; - message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 6: { - if (!(message.extension && message.extension.length)) - message.extension = []; - message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 3: { - if (!(message.nestedType && message.nestedType.length)) - message.nestedType = []; - message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); - break; - } - case 4: { - if (!(message.enumType && message.enumType.length)) - message.enumType = []; - message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 5: { - if (!(message.extensionRange && message.extensionRange.length)) - message.extensionRange = []; - message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); - break; - } - case 8: { - if (!(message.oneofDecl && message.oneofDecl.length)) - message.oneofDecl = []; - 
message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 7: { - message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); - break; - } - case 9: { - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); - break; - } - case 10: { - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - message.reservedName.push(reader.string()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto} DescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - DescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a DescriptorProto message. 
- * @function verify - * @memberof google.protobuf.DescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - DescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.field != null && message.hasOwnProperty("field")) { - if (!Array.isArray(message.field)) - return "field: array expected"; - for (var i = 0; i < message.field.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.field[i]); - if (error) - return "field." + error; - } - } - if (message.extension != null && message.hasOwnProperty("extension")) { - if (!Array.isArray(message.extension)) - return "extension: array expected"; - for (var i = 0; i < message.extension.length; ++i) { - var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); - if (error) - return "extension." + error; - } - } - if (message.nestedType != null && message.hasOwnProperty("nestedType")) { - if (!Array.isArray(message.nestedType)) - return "nestedType: array expected"; - for (var i = 0; i < message.nestedType.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.verify(message.nestedType[i]); - if (error) - return "nestedType." + error; - } - } - if (message.enumType != null && message.hasOwnProperty("enumType")) { - if (!Array.isArray(message.enumType)) - return "enumType: array expected"; - for (var i = 0; i < message.enumType.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); - if (error) - return "enumType." 
+ error; - } - } - if (message.extensionRange != null && message.hasOwnProperty("extensionRange")) { - if (!Array.isArray(message.extensionRange)) - return "extensionRange: array expected"; - for (var i = 0; i < message.extensionRange.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.ExtensionRange.verify(message.extensionRange[i]); - if (error) - return "extensionRange." + error; - } - } - if (message.oneofDecl != null && message.hasOwnProperty("oneofDecl")) { - if (!Array.isArray(message.oneofDecl)) - return "oneofDecl: array expected"; - for (var i = 0; i < message.oneofDecl.length; ++i) { - var error = $root.google.protobuf.OneofDescriptorProto.verify(message.oneofDecl[i]); - if (error) - return "oneofDecl." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.MessageOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { - if (!Array.isArray(message.reservedRange)) - return "reservedRange: array expected"; - for (var i = 0; i < message.reservedRange.length; ++i) { - var error = $root.google.protobuf.DescriptorProto.ReservedRange.verify(message.reservedRange[i]); - if (error) - return "reservedRange." + error; - } - } - if (message.reservedName != null && message.hasOwnProperty("reservedName")) { - if (!Array.isArray(message.reservedName)) - return "reservedName: array expected"; - for (var i = 0; i < message.reservedName.length; ++i) - if (!$util.isString(message.reservedName[i])) - return "reservedName: string[] expected"; - } - return null; - }; - - /** - * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.DescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto} DescriptorProto - */ - DescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto) - return object; - var message = new $root.google.protobuf.DescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.field) { - if (!Array.isArray(object.field)) - throw TypeError(".google.protobuf.DescriptorProto.field: array expected"); - message.field = []; - for (var i = 0; i < object.field.length; ++i) { - if (typeof object.field[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.field: object expected"); - message.field[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.field[i]); - } - } - if (object.extension) { - if (!Array.isArray(object.extension)) - throw TypeError(".google.protobuf.DescriptorProto.extension: array expected"); - message.extension = []; - for (var i = 0; i < object.extension.length; ++i) { - if (typeof object.extension[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.extension: object expected"); - message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); - } - } - if (object.nestedType) { - if (!Array.isArray(object.nestedType)) - throw TypeError(".google.protobuf.DescriptorProto.nestedType: array expected"); - message.nestedType = []; - for (var i = 0; i < object.nestedType.length; ++i) { - if (typeof object.nestedType[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.nestedType: object expected"); - message.nestedType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.nestedType[i]); - } - } - if (object.enumType) { - if (!Array.isArray(object.enumType)) - throw TypeError(".google.protobuf.DescriptorProto.enumType: array expected"); - message.enumType = []; - for 
(var i = 0; i < object.enumType.length; ++i) { - if (typeof object.enumType[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.enumType: object expected"); - message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); - } - } - if (object.extensionRange) { - if (!Array.isArray(object.extensionRange)) - throw TypeError(".google.protobuf.DescriptorProto.extensionRange: array expected"); - message.extensionRange = []; - for (var i = 0; i < object.extensionRange.length; ++i) { - if (typeof object.extensionRange[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.extensionRange: object expected"); - message.extensionRange[i] = $root.google.protobuf.DescriptorProto.ExtensionRange.fromObject(object.extensionRange[i]); - } - } - if (object.oneofDecl) { - if (!Array.isArray(object.oneofDecl)) - throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: array expected"); - message.oneofDecl = []; - for (var i = 0; i < object.oneofDecl.length; ++i) { - if (typeof object.oneofDecl[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: object expected"); - message.oneofDecl[i] = $root.google.protobuf.OneofDescriptorProto.fromObject(object.oneofDecl[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.DescriptorProto.options: object expected"); - message.options = $root.google.protobuf.MessageOptions.fromObject(object.options); - } - if (object.reservedRange) { - if (!Array.isArray(object.reservedRange)) - throw TypeError(".google.protobuf.DescriptorProto.reservedRange: array expected"); - message.reservedRange = []; - for (var i = 0; i < object.reservedRange.length; ++i) { - if (typeof object.reservedRange[i] !== "object") - throw TypeError(".google.protobuf.DescriptorProto.reservedRange: object expected"); - message.reservedRange[i] = 
$root.google.protobuf.DescriptorProto.ReservedRange.fromObject(object.reservedRange[i]); - } - } - if (object.reservedName) { - if (!Array.isArray(object.reservedName)) - throw TypeError(".google.protobuf.DescriptorProto.reservedName: array expected"); - message.reservedName = []; - for (var i = 0; i < object.reservedName.length; ++i) - message.reservedName[i] = String(object.reservedName[i]); - } - return message; - }; - - /** - * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DescriptorProto - * @static - * @param {google.protobuf.DescriptorProto} message DescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - DescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.field = []; - object.nestedType = []; - object.enumType = []; - object.extensionRange = []; - object.extension = []; - object.oneofDecl = []; - object.reservedRange = []; - object.reservedName = []; - } - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.field && message.field.length) { - object.field = []; - for (var j = 0; j < message.field.length; ++j) - object.field[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.field[j], options); - } - if (message.nestedType && message.nestedType.length) { - object.nestedType = []; - for (var j = 0; j < message.nestedType.length; ++j) - object.nestedType[j] = $root.google.protobuf.DescriptorProto.toObject(message.nestedType[j], options); - } - if (message.enumType && message.enumType.length) { - object.enumType = []; - for (var j = 0; j < message.enumType.length; ++j) - object.enumType[j] = 
$root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); - } - if (message.extensionRange && message.extensionRange.length) { - object.extensionRange = []; - for (var j = 0; j < message.extensionRange.length; ++j) - object.extensionRange[j] = $root.google.protobuf.DescriptorProto.ExtensionRange.toObject(message.extensionRange[j], options); - } - if (message.extension && message.extension.length) { - object.extension = []; - for (var j = 0; j < message.extension.length; ++j) - object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.MessageOptions.toObject(message.options, options); - if (message.oneofDecl && message.oneofDecl.length) { - object.oneofDecl = []; - for (var j = 0; j < message.oneofDecl.length; ++j) - object.oneofDecl[j] = $root.google.protobuf.OneofDescriptorProto.toObject(message.oneofDecl[j], options); - } - if (message.reservedRange && message.reservedRange.length) { - object.reservedRange = []; - for (var j = 0; j < message.reservedRange.length; ++j) - object.reservedRange[j] = $root.google.protobuf.DescriptorProto.ReservedRange.toObject(message.reservedRange[j], options); - } - if (message.reservedName && message.reservedName.length) { - object.reservedName = []; - for (var j = 0; j < message.reservedName.length; ++j) - object.reservedName[j] = message.reservedName[j]; - } - return object; - }; - - /** - * Converts this DescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.DescriptorProto - * @instance - * @returns {Object.} JSON object - */ - DescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for DescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.DescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - DescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.DescriptorProto"; - }; - - DescriptorProto.ExtensionRange = (function() { - - /** - * Properties of an ExtensionRange. - * @memberof google.protobuf.DescriptorProto - * @interface IExtensionRange - * @property {number|null} [start] ExtensionRange start - * @property {number|null} [end] ExtensionRange end - * @property {google.protobuf.IExtensionRangeOptions|null} [options] ExtensionRange options - */ - - /** - * Constructs a new ExtensionRange. - * @memberof google.protobuf.DescriptorProto - * @classdesc Represents an ExtensionRange. - * @implements IExtensionRange - * @constructor - * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set - */ - function ExtensionRange(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ExtensionRange start. - * @member {number} start - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.start = 0; - - /** - * ExtensionRange end. 
- * @member {number} end - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.end = 0; - - /** - * ExtensionRange options. - * @member {google.protobuf.IExtensionRangeOptions|null|undefined} options - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - */ - ExtensionRange.prototype.options = null; - - /** - * Creates a new ExtensionRange instance using the specified properties. - * @function create - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange instance - */ - ExtensionRange.create = function create(properties) { - return new ExtensionRange(properties); - }; - - /** - * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. - * @function encode - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRange.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ExtensionRange message, length delimited. 
Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRange.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRange.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.start = reader.int32(); - break; - } - case 2: { - message.end = reader.int32(); - break; - } - case 3: { - message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRange.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ExtensionRange message. - * @function verify - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ExtensionRange.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.ExtensionRangeOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; - - /** - * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange - */ - ExtensionRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto.ExtensionRange) - return object; - var message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected"); - message.options = $root.google.protobuf.ExtensionRangeOptions.fromObject(object.options); - } - return message; - }; - - /** - * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {google.protobuf.DescriptorProto.ExtensionRange} message ExtensionRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ExtensionRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; - object.options = null; - } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.ExtensionRangeOptions.toObject(message.options, options); - return object; - }; - - /** - * Converts this ExtensionRange to JSON. 
- * @function toJSON - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @instance - * @returns {Object.} JSON object - */ - ExtensionRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ExtensionRange - * @function getTypeUrl - * @memberof google.protobuf.DescriptorProto.ExtensionRange - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ExtensionRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.DescriptorProto.ExtensionRange"; - }; - - return ExtensionRange; - })(); - - DescriptorProto.ReservedRange = (function() { - - /** - * Properties of a ReservedRange. - * @memberof google.protobuf.DescriptorProto - * @interface IReservedRange - * @property {number|null} [start] ReservedRange start - * @property {number|null} [end] ReservedRange end - */ - - /** - * Constructs a new ReservedRange. - * @memberof google.protobuf.DescriptorProto - * @classdesc Represents a ReservedRange. - * @implements IReservedRange - * @constructor - * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set - */ - function ReservedRange(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ReservedRange start. - * @member {number} start - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - */ - ReservedRange.prototype.start = 0; - - /** - * ReservedRange end. 
- * @member {number} end - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - */ - ReservedRange.prototype.end = 0; - - /** - * Creates a new ReservedRange instance using the specified properties. - * @function create - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange instance - */ - ReservedRange.create = function create(properties) { - return new ReservedRange(properties); - }; - - /** - * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. - * @function encode - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReservedRange.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - return writer; - }; - - /** - * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ReservedRange.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ReservedRange message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReservedRange.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.start = reader.int32(); - break; - } - case 2: { - message.end = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ReservedRange message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ReservedRange.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ReservedRange message. - * @function verify - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ReservedRange.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - return null; - }; - - /** - * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange - */ - ReservedRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DescriptorProto.ReservedRange) - return object; - var message = new $root.google.protobuf.DescriptorProto.ReservedRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - return message; - }; - - /** - * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {google.protobuf.DescriptorProto.ReservedRange} message ReservedRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ReservedRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; - } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - return object; - }; - - /** - * Converts this ReservedRange to JSON. 
- * @function toJSON - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @instance - * @returns {Object.} JSON object - */ - ReservedRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ReservedRange - * @function getTypeUrl - * @memberof google.protobuf.DescriptorProto.ReservedRange - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.DescriptorProto.ReservedRange"; - }; - - return ReservedRange; - })(); - - return DescriptorProto; - })(); - - protobuf.ExtensionRangeOptions = (function() { - - /** - * Properties of an ExtensionRangeOptions. - * @memberof google.protobuf - * @interface IExtensionRangeOptions - * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption - */ - - /** - * Constructs a new ExtensionRangeOptions. - * @memberof google.protobuf - * @classdesc Represents an ExtensionRangeOptions. - * @implements IExtensionRangeOptions - * @constructor - * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set - */ - function ExtensionRangeOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ExtensionRangeOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.ExtensionRangeOptions - * @instance - */ - ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * Creates a new ExtensionRangeOptions instance using the specified properties. 
- * @function create - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions instance - */ - ExtensionRangeOptions.create = function create(properties) { - return new ExtensionRangeOptions(properties); - }; - - /** - * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. - * @function encode - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRangeOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExtensionRangeOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRangeOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExtensionRangeOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ExtensionRangeOptions message. 
- * @function verify - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ExtensionRangeOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - return null; - }; - - /** - * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions - */ - ExtensionRangeOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ExtensionRangeOptions) - return object; - var message = new $root.google.protobuf.ExtensionRangeOptions(); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - return message; - }; - - /** - * Creates a plain object from an 
ExtensionRangeOptions message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {google.protobuf.ExtensionRangeOptions} message ExtensionRangeOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ExtensionRangeOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - return object; - }; - - /** - * Converts this ExtensionRangeOptions to JSON. - * @function toJSON - * @memberof google.protobuf.ExtensionRangeOptions - * @instance - * @returns {Object.} JSON object - */ - ExtensionRangeOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ExtensionRangeOptions - * @function getTypeUrl - * @memberof google.protobuf.ExtensionRangeOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ExtensionRangeOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.ExtensionRangeOptions"; - }; - - return ExtensionRangeOptions; - })(); - - protobuf.FieldDescriptorProto = (function() { - - /** - * Properties of a FieldDescriptorProto. 
- * @memberof google.protobuf - * @interface IFieldDescriptorProto - * @property {string|null} [name] FieldDescriptorProto name - * @property {number|null} [number] FieldDescriptorProto number - * @property {google.protobuf.FieldDescriptorProto.Label|null} [label] FieldDescriptorProto label - * @property {google.protobuf.FieldDescriptorProto.Type|null} [type] FieldDescriptorProto type - * @property {string|null} [typeName] FieldDescriptorProto typeName - * @property {string|null} [extendee] FieldDescriptorProto extendee - * @property {string|null} [defaultValue] FieldDescriptorProto defaultValue - * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex - * @property {string|null} [jsonName] FieldDescriptorProto jsonName - * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options - * @property {boolean|null} [proto3Optional] FieldDescriptorProto proto3Optional - */ - - /** - * Constructs a new FieldDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents a FieldDescriptorProto. - * @implements IFieldDescriptorProto - * @constructor - * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set - */ - function FieldDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FieldDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.name = ""; - - /** - * FieldDescriptorProto number. - * @member {number} number - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.number = 0; - - /** - * FieldDescriptorProto label. 
- * @member {google.protobuf.FieldDescriptorProto.Label} label - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.label = 1; - - /** - * FieldDescriptorProto type. - * @member {google.protobuf.FieldDescriptorProto.Type} type - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.type = 1; - - /** - * FieldDescriptorProto typeName. - * @member {string} typeName - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.typeName = ""; - - /** - * FieldDescriptorProto extendee. - * @member {string} extendee - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.extendee = ""; - - /** - * FieldDescriptorProto defaultValue. - * @member {string} defaultValue - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.defaultValue = ""; - - /** - * FieldDescriptorProto oneofIndex. - * @member {number} oneofIndex - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.oneofIndex = 0; - - /** - * FieldDescriptorProto jsonName. - * @member {string} jsonName - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.jsonName = ""; - - /** - * FieldDescriptorProto options. - * @member {google.protobuf.IFieldOptions|null|undefined} options - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.options = null; - - /** - * FieldDescriptorProto proto3Optional. - * @member {boolean} proto3Optional - * @memberof google.protobuf.FieldDescriptorProto - * @instance - */ - FieldDescriptorProto.prototype.proto3Optional = false; - - /** - * Creates a new FieldDescriptorProto instance using the specified properties. 
- * @function create - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto instance - */ - FieldDescriptorProto.create = function create(properties) { - return new FieldDescriptorProto(properties); - }; - - /** - * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FieldDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); - if (message.number != null && Object.hasOwnProperty.call(message, "number")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); - if (message.label != null && Object.hasOwnProperty.call(message, "label")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); - if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); - if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); - if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); - if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) - writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); - if (message.proto3Optional != null && Object.hasOwnProperty.call(message, "proto3Optional")) - writer.uint32(/* id 17, wireType 0 =*/136).bool(message.proto3Optional); - return writer; - }; - - /** - * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FieldDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FieldDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 3: { - message.number = reader.int32(); - break; - } - case 4: { - message.label = reader.int32(); - break; - } - case 5: { - message.type = reader.int32(); - break; - } - case 6: { - message.typeName = reader.string(); - break; - } - case 2: { - message.extendee = reader.string(); - break; - } - case 7: { - message.defaultValue = reader.string(); - break; - } - case 9: { - message.oneofIndex = reader.int32(); - break; - } - case 10: { - message.jsonName = reader.string(); - break; - } - case 8: { - message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); - break; - } - case 17: { - message.proto3Optional = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FieldDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FieldDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FieldDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.number != null && message.hasOwnProperty("number")) - if (!$util.isInteger(message.number)) - return "number: integer expected"; - if (message.label != null && message.hasOwnProperty("label")) - switch (message.label) { - default: - return "label: enum value expected"; - case 1: - case 2: - case 3: - break; - } - if (message.type != null && message.hasOwnProperty("type")) - switch (message.type) { - default: - return "type: enum value expected"; - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - case 10: - case 11: - case 12: - case 13: - case 14: - case 15: - case 16: - case 17: - case 18: - break; - } - if (message.typeName != null && message.hasOwnProperty("typeName")) - if (!$util.isString(message.typeName)) - return "typeName: string expected"; - if (message.extendee != null && message.hasOwnProperty("extendee")) - if (!$util.isString(message.extendee)) - return "extendee: string expected"; - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) - if (!$util.isString(message.defaultValue)) - return "defaultValue: string expected"; - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) - if (!$util.isInteger(message.oneofIndex)) - return "oneofIndex: integer expected"; - if (message.jsonName != null && message.hasOwnProperty("jsonName")) - if (!$util.isString(message.jsonName)) - return "jsonName: string expected"; - if (message.options != null && message.hasOwnProperty("options")) 
{ - var error = $root.google.protobuf.FieldOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) - if (typeof message.proto3Optional !== "boolean") - return "proto3Optional: boolean expected"; - return null; - }; - - /** - * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto - */ - FieldDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FieldDescriptorProto) - return object; - var message = new $root.google.protobuf.FieldDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.number != null) - message.number = object.number | 0; - switch (object.label) { - default: - if (typeof object.label === "number") { - message.label = object.label; - break; - } - break; - case "LABEL_OPTIONAL": - case 1: - message.label = 1; - break; - case "LABEL_REQUIRED": - case 2: - message.label = 2; - break; - case "LABEL_REPEATED": - case 3: - message.label = 3; - break; - } - switch (object.type) { - default: - if (typeof object.type === "number") { - message.type = object.type; - break; - } - break; - case "TYPE_DOUBLE": - case 1: - message.type = 1; - break; - case "TYPE_FLOAT": - case 2: - message.type = 2; - break; - case "TYPE_INT64": - case 3: - message.type = 3; - break; - case "TYPE_UINT64": - case 4: - message.type = 4; - break; - case "TYPE_INT32": - case 5: - message.type = 5; - break; - case "TYPE_FIXED64": - case 6: - message.type = 6; - break; - case "TYPE_FIXED32": - case 7: - message.type = 7; - break; - case "TYPE_BOOL": - case 8: - message.type = 8; - break; - case "TYPE_STRING": - case 9: - message.type = 
9; - break; - case "TYPE_GROUP": - case 10: - message.type = 10; - break; - case "TYPE_MESSAGE": - case 11: - message.type = 11; - break; - case "TYPE_BYTES": - case 12: - message.type = 12; - break; - case "TYPE_UINT32": - case 13: - message.type = 13; - break; - case "TYPE_ENUM": - case 14: - message.type = 14; - break; - case "TYPE_SFIXED32": - case 15: - message.type = 15; - break; - case "TYPE_SFIXED64": - case 16: - message.type = 16; - break; - case "TYPE_SINT32": - case 17: - message.type = 17; - break; - case "TYPE_SINT64": - case 18: - message.type = 18; - break; - } - if (object.typeName != null) - message.typeName = String(object.typeName); - if (object.extendee != null) - message.extendee = String(object.extendee); - if (object.defaultValue != null) - message.defaultValue = String(object.defaultValue); - if (object.oneofIndex != null) - message.oneofIndex = object.oneofIndex | 0; - if (object.jsonName != null) - message.jsonName = String(object.jsonName); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); - } - if (object.proto3Optional != null) - message.proto3Optional = Boolean(object.proto3Optional); - return message; - }; - - /** - * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {google.protobuf.FieldDescriptorProto} message FieldDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FieldDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.extendee = ""; - object.number = 0; - object.label = options.enums === String ? "LABEL_OPTIONAL" : 1; - object.type = options.enums === String ? "TYPE_DOUBLE" : 1; - object.typeName = ""; - object.defaultValue = ""; - object.options = null; - object.oneofIndex = 0; - object.jsonName = ""; - object.proto3Optional = false; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.extendee != null && message.hasOwnProperty("extendee")) - object.extendee = message.extendee; - if (message.number != null && message.hasOwnProperty("number")) - object.number = message.number; - if (message.label != null && message.hasOwnProperty("label")) - object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] === undefined ? message.label : $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; - if (message.type != null && message.hasOwnProperty("type")) - object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] === undefined ? 
message.type : $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; - if (message.typeName != null && message.hasOwnProperty("typeName")) - object.typeName = message.typeName; - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) - object.defaultValue = message.defaultValue; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.FieldOptions.toObject(message.options, options); - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) - object.oneofIndex = message.oneofIndex; - if (message.jsonName != null && message.hasOwnProperty("jsonName")) - object.jsonName = message.jsonName; - if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) - object.proto3Optional = message.proto3Optional; - return object; - }; - - /** - * Converts this FieldDescriptorProto to JSON. - * @function toJSON - * @memberof google.protobuf.FieldDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - FieldDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FieldDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.FieldDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FieldDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FieldDescriptorProto"; - }; - - /** - * Type enum. 
- * @name google.protobuf.FieldDescriptorProto.Type - * @enum {number} - * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value - * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value - * @property {number} TYPE_INT64=3 TYPE_INT64 value - * @property {number} TYPE_UINT64=4 TYPE_UINT64 value - * @property {number} TYPE_INT32=5 TYPE_INT32 value - * @property {number} TYPE_FIXED64=6 TYPE_FIXED64 value - * @property {number} TYPE_FIXED32=7 TYPE_FIXED32 value - * @property {number} TYPE_BOOL=8 TYPE_BOOL value - * @property {number} TYPE_STRING=9 TYPE_STRING value - * @property {number} TYPE_GROUP=10 TYPE_GROUP value - * @property {number} TYPE_MESSAGE=11 TYPE_MESSAGE value - * @property {number} TYPE_BYTES=12 TYPE_BYTES value - * @property {number} TYPE_UINT32=13 TYPE_UINT32 value - * @property {number} TYPE_ENUM=14 TYPE_ENUM value - * @property {number} TYPE_SFIXED32=15 TYPE_SFIXED32 value - * @property {number} TYPE_SFIXED64=16 TYPE_SFIXED64 value - * @property {number} TYPE_SINT32=17 TYPE_SINT32 value - * @property {number} TYPE_SINT64=18 TYPE_SINT64 value - */ - FieldDescriptorProto.Type = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "TYPE_DOUBLE"] = 1; - values[valuesById[2] = "TYPE_FLOAT"] = 2; - values[valuesById[3] = "TYPE_INT64"] = 3; - values[valuesById[4] = "TYPE_UINT64"] = 4; - values[valuesById[5] = "TYPE_INT32"] = 5; - values[valuesById[6] = "TYPE_FIXED64"] = 6; - values[valuesById[7] = "TYPE_FIXED32"] = 7; - values[valuesById[8] = "TYPE_BOOL"] = 8; - values[valuesById[9] = "TYPE_STRING"] = 9; - values[valuesById[10] = "TYPE_GROUP"] = 10; - values[valuesById[11] = "TYPE_MESSAGE"] = 11; - values[valuesById[12] = "TYPE_BYTES"] = 12; - values[valuesById[13] = "TYPE_UINT32"] = 13; - values[valuesById[14] = "TYPE_ENUM"] = 14; - values[valuesById[15] = "TYPE_SFIXED32"] = 15; - values[valuesById[16] = "TYPE_SFIXED64"] = 16; - values[valuesById[17] = "TYPE_SINT32"] = 17; - values[valuesById[18] = "TYPE_SINT64"] = 18; 
- return values; - })(); - - /** - * Label enum. - * @name google.protobuf.FieldDescriptorProto.Label - * @enum {number} - * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value - * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value - * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value - */ - FieldDescriptorProto.Label = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "LABEL_OPTIONAL"] = 1; - values[valuesById[2] = "LABEL_REQUIRED"] = 2; - values[valuesById[3] = "LABEL_REPEATED"] = 3; - return values; - })(); - - return FieldDescriptorProto; - })(); - - protobuf.OneofDescriptorProto = (function() { - - /** - * Properties of an OneofDescriptorProto. - * @memberof google.protobuf - * @interface IOneofDescriptorProto - * @property {string|null} [name] OneofDescriptorProto name - * @property {google.protobuf.IOneofOptions|null} [options] OneofDescriptorProto options - */ - - /** - * Constructs a new OneofDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents an OneofDescriptorProto. - * @implements IOneofDescriptorProto - * @constructor - * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set - */ - function OneofDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * OneofDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.OneofDescriptorProto - * @instance - */ - OneofDescriptorProto.prototype.name = ""; - - /** - * OneofDescriptorProto options. - * @member {google.protobuf.IOneofOptions|null|undefined} options - * @memberof google.protobuf.OneofDescriptorProto - * @instance - */ - OneofDescriptorProto.prototype.options = null; - - /** - * Creates a new OneofDescriptorProto instance using the specified properties. 
- * @function create - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto instance - */ - OneofDescriptorProto.create = function create(properties) { - return new OneofDescriptorProto(properties); - }; - - /** - * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - OneofDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - OneofDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an OneofDescriptorProto message. - * @function verify - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - OneofDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.OneofOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; - - /** - * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto - */ - OneofDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.OneofDescriptorProto) - return object; - var message = new $root.google.protobuf.OneofDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.OneofOptions.fromObject(object.options); - } - return message; - }; - - /** - * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {google.protobuf.OneofDescriptorProto} message OneofDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - OneofDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.OneofOptions.toObject(message.options, options); - return object; - }; - - /** - * Converts this OneofDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.OneofDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - OneofDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for OneofDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.OneofDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - OneofDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.OneofDescriptorProto"; - }; - - return OneofDescriptorProto; - })(); - - protobuf.EnumDescriptorProto = (function() { - - /** - * Properties of an EnumDescriptorProto. - * @memberof google.protobuf - * @interface IEnumDescriptorProto - * @property {string|null} [name] EnumDescriptorProto name - * @property {Array.|null} [value] EnumDescriptorProto value - * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options - * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange - * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName - */ - - /** - * Constructs a new EnumDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents an EnumDescriptorProto. - * @implements IEnumDescriptorProto - * @constructor - * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set - */ - function EnumDescriptorProto(properties) { - this.value = []; - this.reservedRange = []; - this.reservedName = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumDescriptorProto name. 
- * @member {string} name - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.name = ""; - - /** - * EnumDescriptorProto value. - * @member {Array.} value - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.value = $util.emptyArray; - - /** - * EnumDescriptorProto options. - * @member {google.protobuf.IEnumOptions|null|undefined} options - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.options = null; - - /** - * EnumDescriptorProto reservedRange. - * @member {Array.} reservedRange - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.reservedRange = $util.emptyArray; - - /** - * EnumDescriptorProto reservedName. - * @member {Array.} reservedName - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.reservedName = $util.emptyArray; - - /** - * Creates a new EnumDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto instance - */ - EnumDescriptorProto.create = function create(properties) { - return new EnumDescriptorProto(properties); - }; - - /** - * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.value != null && message.value.length) - for (var i = 0; i < message.value.length; ++i) - $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.reservedRange != null && message.reservedRange.length) - for (var i = 0; i < message.reservedRange.length; ++i) - $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.reservedName != null && message.reservedName.length) - for (var i = 0; i < message.reservedName.length; ++i) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); - return writer; - }; - - /** - * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - if (!(message.value && message.value.length)) - message.value = []; - message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 3: { - message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); - break; - } - case 4: { - if (!(message.reservedRange && message.reservedRange.length)) - message.reservedRange = []; - message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); - break; - } - case 5: { - if (!(message.reservedName && message.reservedName.length)) - message.reservedName = []; - message.reservedName.push(reader.string()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an EnumDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.value != null && message.hasOwnProperty("value")) { - if (!Array.isArray(message.value)) - return "value: array expected"; - for (var i = 0; i < message.value.length; ++i) { - var error = $root.google.protobuf.EnumValueDescriptorProto.verify(message.value[i]); - if (error) - return "value." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.EnumOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { - if (!Array.isArray(message.reservedRange)) - return "reservedRange: array expected"; - for (var i = 0; i < message.reservedRange.length; ++i) { - var error = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.verify(message.reservedRange[i]); - if (error) - return "reservedRange." + error; - } - } - if (message.reservedName != null && message.hasOwnProperty("reservedName")) { - if (!Array.isArray(message.reservedName)) - return "reservedName: array expected"; - for (var i = 0; i < message.reservedName.length; ++i) - if (!$util.isString(message.reservedName[i])) - return "reservedName: string[] expected"; - } - return null; - }; - - /** - * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto - */ - EnumDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumDescriptorProto) - return object; - var message = new $root.google.protobuf.EnumDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.value) { - if (!Array.isArray(object.value)) - throw TypeError(".google.protobuf.EnumDescriptorProto.value: array expected"); - message.value = []; - for (var i = 0; i < object.value.length; ++i) { - if (typeof object.value[i] !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.value: object expected"); - message.value[i] = $root.google.protobuf.EnumValueDescriptorProto.fromObject(object.value[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.EnumOptions.fromObject(object.options); - } - if (object.reservedRange) { - if (!Array.isArray(object.reservedRange)) - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: array expected"); - message.reservedRange = []; - for (var i = 0; i < object.reservedRange.length; ++i) { - if (typeof object.reservedRange[i] !== "object") - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: object expected"); - message.reservedRange[i] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.fromObject(object.reservedRange[i]); - } - } - if (object.reservedName) { - if (!Array.isArray(object.reservedName)) - throw TypeError(".google.protobuf.EnumDescriptorProto.reservedName: array expected"); - message.reservedName = []; - for (var i = 0; i < object.reservedName.length; ++i) - message.reservedName[i] = String(object.reservedName[i]); - 
} - return message; - }; - - /** - * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {google.protobuf.EnumDescriptorProto} message EnumDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.value = []; - object.reservedRange = []; - object.reservedName = []; - } - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.value && message.value.length) { - object.value = []; - for (var j = 0; j < message.value.length; ++j) - object.value[j] = $root.google.protobuf.EnumValueDescriptorProto.toObject(message.value[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.EnumOptions.toObject(message.options, options); - if (message.reservedRange && message.reservedRange.length) { - object.reservedRange = []; - for (var j = 0; j < message.reservedRange.length; ++j) - object.reservedRange[j] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.toObject(message.reservedRange[j], options); - } - if (message.reservedName && message.reservedName.length) { - object.reservedName = []; - for (var j = 0; j < message.reservedName.length; ++j) - object.reservedName[j] = message.reservedName[j]; - } - return object; - }; - - /** - * Converts this EnumDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.EnumDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - EnumDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for EnumDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.EnumDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - EnumDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto"; - }; - - EnumDescriptorProto.EnumReservedRange = (function() { - - /** - * Properties of an EnumReservedRange. - * @memberof google.protobuf.EnumDescriptorProto - * @interface IEnumReservedRange - * @property {number|null} [start] EnumReservedRange start - * @property {number|null} [end] EnumReservedRange end - */ - - /** - * Constructs a new EnumReservedRange. - * @memberof google.protobuf.EnumDescriptorProto - * @classdesc Represents an EnumReservedRange. - * @implements IEnumReservedRange - * @constructor - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set - */ - function EnumReservedRange(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumReservedRange start. - * @member {number} start - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @instance - */ - EnumReservedRange.prototype.start = 0; - - /** - * EnumReservedRange end. 
- * @member {number} end - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @instance - */ - EnumReservedRange.prototype.end = 0; - - /** - * Creates a new EnumReservedRange instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange instance - */ - EnumReservedRange.create = function create(properties) { - return new EnumReservedRange(properties); - }; - - /** - * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. - * @function encode - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumReservedRange.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.start != null && Object.hasOwnProperty.call(message, "start")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - return writer; - }; - - /** - * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumReservedRange.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumReservedRange message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumReservedRange.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.start = reader.int32(); - break; - } - case 2: { - message.end = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumReservedRange.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an EnumReservedRange message. - * @function verify - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumReservedRange.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.start != null && message.hasOwnProperty("start")) - if (!$util.isInteger(message.start)) - return "start: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - return null; - }; - - /** - * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange - */ - EnumReservedRange.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumDescriptorProto.EnumReservedRange) - return object; - var message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); - if (object.start != null) - message.start = object.start | 0; - if (object.end != null) - message.end = object.end | 0; - return message; - }; - - /** - * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {google.protobuf.EnumDescriptorProto.EnumReservedRange} message EnumReservedRange - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumReservedRange.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.start = 0; - object.end = 0; - } - if (message.start != null && message.hasOwnProperty("start")) - object.start = message.start; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - return object; - }; - - /** - * Converts this EnumReservedRange to JSON. 
- * @function toJSON - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @instance - * @returns {Object.} JSON object - */ - EnumReservedRange.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for EnumReservedRange - * @function getTypeUrl - * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - EnumReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto.EnumReservedRange"; - }; - - return EnumReservedRange; - })(); - - return EnumDescriptorProto; - })(); - - protobuf.EnumValueDescriptorProto = (function() { - - /** - * Properties of an EnumValueDescriptorProto. - * @memberof google.protobuf - * @interface IEnumValueDescriptorProto - * @property {string|null} [name] EnumValueDescriptorProto name - * @property {number|null} [number] EnumValueDescriptorProto number - * @property {google.protobuf.IEnumValueOptions|null} [options] EnumValueDescriptorProto options - */ - - /** - * Constructs a new EnumValueDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents an EnumValueDescriptorProto. - * @implements IEnumValueDescriptorProto - * @constructor - * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set - */ - function EnumValueDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumValueDescriptorProto name. 
- * @member {string} name - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.name = ""; - - /** - * EnumValueDescriptorProto number. - * @member {number} number - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.number = 0; - - /** - * EnumValueDescriptorProto options. - * @member {google.protobuf.IEnumValueOptions|null|undefined} options - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - */ - EnumValueDescriptorProto.prototype.options = null; - - /** - * Creates a new EnumValueDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto instance - */ - EnumValueDescriptorProto.create = function create(properties) { - return new EnumValueDescriptorProto(properties); - }; - - /** - * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.number != null && Object.hasOwnProperty.call(message, "number")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.number = reader.int32(); - break; - } - case 3: { - message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an EnumValueDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumValueDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.number != null && message.hasOwnProperty("number")) - if (!$util.isInteger(message.number)) - return "number: integer expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.EnumValueOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; - - /** - * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto - */ - EnumValueDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumValueDescriptorProto) - return object; - var message = new $root.google.protobuf.EnumValueDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.number != null) - message.number = object.number | 0; - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.EnumValueOptions.fromObject(object.options); - } - return message; - }; - - /** - * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {google.protobuf.EnumValueDescriptorProto} message EnumValueDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumValueDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.number = 0; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.number != null && message.hasOwnProperty("number")) - object.number = message.number; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.EnumValueOptions.toObject(message.options, options); - return object; - }; - - /** - * Converts this EnumValueDescriptorProto to JSON. - * @function toJSON - * @memberof google.protobuf.EnumValueDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - EnumValueDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for EnumValueDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.EnumValueDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - EnumValueDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.EnumValueDescriptorProto"; - }; - - return EnumValueDescriptorProto; - })(); - - protobuf.ServiceDescriptorProto = (function() { - - /** - * Properties of a ServiceDescriptorProto. 
- * @memberof google.protobuf - * @interface IServiceDescriptorProto - * @property {string|null} [name] ServiceDescriptorProto name - * @property {Array.|null} [method] ServiceDescriptorProto method - * @property {google.protobuf.IServiceOptions|null} [options] ServiceDescriptorProto options - */ - - /** - * Constructs a new ServiceDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents a ServiceDescriptorProto. - * @implements IServiceDescriptorProto - * @constructor - * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set - */ - function ServiceDescriptorProto(properties) { - this.method = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ServiceDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - */ - ServiceDescriptorProto.prototype.name = ""; - - /** - * ServiceDescriptorProto method. - * @member {Array.} method - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - */ - ServiceDescriptorProto.prototype.method = $util.emptyArray; - - /** - * ServiceDescriptorProto options. - * @member {google.protobuf.IServiceOptions|null|undefined} options - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - */ - ServiceDescriptorProto.prototype.options = null; - - /** - * Creates a new ServiceDescriptorProto instance using the specified properties. - * @function create - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto instance - */ - ServiceDescriptorProto.create = function create(properties) { - return new ServiceDescriptorProto(properties); - }; - - /** - * Encodes the specified ServiceDescriptorProto message. 
Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ServiceDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.method != null && message.method.length) - for (var i = 0; i < message.method.length; ++i) - $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ServiceDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ServiceDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - if (!(message.method && message.method.length)) - message.method = []; - message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); - break; - } - case 3: { - message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ServiceDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ServiceDescriptorProto message. - * @function verify - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ServiceDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.method != null && message.hasOwnProperty("method")) { - if (!Array.isArray(message.method)) - return "method: array expected"; - for (var i = 0; i < message.method.length; ++i) { - var error = $root.google.protobuf.MethodDescriptorProto.verify(message.method[i]); - if (error) - return "method." + error; - } - } - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.ServiceOptions.verify(message.options); - if (error) - return "options." + error; - } - return null; - }; - - /** - * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto - */ - ServiceDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ServiceDescriptorProto) - return object; - var message = new $root.google.protobuf.ServiceDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.method) { - if (!Array.isArray(object.method)) - throw TypeError(".google.protobuf.ServiceDescriptorProto.method: array expected"); - message.method = []; - for (var i = 0; i < object.method.length; ++i) { - if (typeof object.method[i] !== "object") - throw TypeError(".google.protobuf.ServiceDescriptorProto.method: object expected"); - message.method[i] = $root.google.protobuf.MethodDescriptorProto.fromObject(object.method[i]); - } - } - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.ServiceDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.ServiceOptions.fromObject(object.options); - } - return message; - }; - - /** - * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {google.protobuf.ServiceDescriptorProto} message ServiceDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ServiceDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.method = []; - if (options.defaults) { - object.name = ""; - object.options = null; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.method && message.method.length) { - object.method = []; - for (var j = 0; j < message.method.length; ++j) - object.method[j] = $root.google.protobuf.MethodDescriptorProto.toObject(message.method[j], options); - } - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.ServiceOptions.toObject(message.options, options); - return object; - }; - - /** - * Converts this ServiceDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.ServiceDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - ServiceDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ServiceDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.ServiceDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ServiceDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.ServiceDescriptorProto"; - }; - - return ServiceDescriptorProto; - })(); - - protobuf.MethodDescriptorProto = (function() { - - /** - * Properties of a MethodDescriptorProto. - * @memberof google.protobuf - * @interface IMethodDescriptorProto - * @property {string|null} [name] MethodDescriptorProto name - * @property {string|null} [inputType] MethodDescriptorProto inputType - * @property {string|null} [outputType] MethodDescriptorProto outputType - * @property {google.protobuf.IMethodOptions|null} [options] MethodDescriptorProto options - * @property {boolean|null} [clientStreaming] MethodDescriptorProto clientStreaming - * @property {boolean|null} [serverStreaming] MethodDescriptorProto serverStreaming - */ - - /** - * Constructs a new MethodDescriptorProto. - * @memberof google.protobuf - * @classdesc Represents a MethodDescriptorProto. 
- * @implements IMethodDescriptorProto - * @constructor - * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set - */ - function MethodDescriptorProto(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * MethodDescriptorProto name. - * @member {string} name - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.name = ""; - - /** - * MethodDescriptorProto inputType. - * @member {string} inputType - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.inputType = ""; - - /** - * MethodDescriptorProto outputType. - * @member {string} outputType - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.outputType = ""; - - /** - * MethodDescriptorProto options. - * @member {google.protobuf.IMethodOptions|null|undefined} options - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.options = null; - - /** - * MethodDescriptorProto clientStreaming. - * @member {boolean} clientStreaming - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.clientStreaming = false; - - /** - * MethodDescriptorProto serverStreaming. - * @member {boolean} serverStreaming - * @memberof google.protobuf.MethodDescriptorProto - * @instance - */ - MethodDescriptorProto.prototype.serverStreaming = false; - - /** - * Creates a new MethodDescriptorProto instance using the specified properties. 
- * @function create - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto instance - */ - MethodDescriptorProto.create = function create(properties) { - return new MethodDescriptorProto(properties); - }; - - /** - * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @function encode - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MethodDescriptorProto.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && Object.hasOwnProperty.call(message, "name")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); - if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); - if (message.options != null && Object.hasOwnProperty.call(message, "options")) - $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) - writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); - if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) - writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); - return writer; - }; - - /** - * Encodes the specified 
MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MethodDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MethodDescriptorProto.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.name = reader.string(); - break; - } - case 2: { - message.inputType = reader.string(); - break; - } - case 3: { - message.outputType = reader.string(); - break; - } - case 4: { - message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); - break; - } - case 5: { - message.clientStreaming = reader.bool(); - break; - } - case 6: { - message.serverStreaming = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MethodDescriptorProto.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a MethodDescriptorProto message. 
- * @function verify - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - MethodDescriptorProto.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) - if (!$util.isString(message.name)) - return "name: string expected"; - if (message.inputType != null && message.hasOwnProperty("inputType")) - if (!$util.isString(message.inputType)) - return "inputType: string expected"; - if (message.outputType != null && message.hasOwnProperty("outputType")) - if (!$util.isString(message.outputType)) - return "outputType: string expected"; - if (message.options != null && message.hasOwnProperty("options")) { - var error = $root.google.protobuf.MethodOptions.verify(message.options); - if (error) - return "options." + error; - } - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) - if (typeof message.clientStreaming !== "boolean") - return "clientStreaming: boolean expected"; - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) - if (typeof message.serverStreaming !== "boolean") - return "serverStreaming: boolean expected"; - return null; - }; - - /** - * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto - */ - MethodDescriptorProto.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MethodDescriptorProto) - return object; - var message = new $root.google.protobuf.MethodDescriptorProto(); - if (object.name != null) - message.name = String(object.name); - if (object.inputType != null) - message.inputType = String(object.inputType); - if (object.outputType != null) - message.outputType = String(object.outputType); - if (object.options != null) { - if (typeof object.options !== "object") - throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected"); - message.options = $root.google.protobuf.MethodOptions.fromObject(object.options); - } - if (object.clientStreaming != null) - message.clientStreaming = Boolean(object.clientStreaming); - if (object.serverStreaming != null) - message.serverStreaming = Boolean(object.serverStreaming); - return message; - }; - - /** - * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {google.protobuf.MethodDescriptorProto} message MethodDescriptorProto - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - MethodDescriptorProto.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.name = ""; - object.inputType = ""; - object.outputType = ""; - object.options = null; - object.clientStreaming = false; - object.serverStreaming = false; - } - if (message.name != null && message.hasOwnProperty("name")) - object.name = message.name; - if (message.inputType != null && message.hasOwnProperty("inputType")) - object.inputType = message.inputType; - if (message.outputType != null && message.hasOwnProperty("outputType")) - object.outputType = message.outputType; - if (message.options != null && message.hasOwnProperty("options")) - object.options = $root.google.protobuf.MethodOptions.toObject(message.options, options); - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) - object.clientStreaming = message.clientStreaming; - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) - object.serverStreaming = message.serverStreaming; - return object; - }; - - /** - * Converts this MethodDescriptorProto to JSON. 
- * @function toJSON - * @memberof google.protobuf.MethodDescriptorProto - * @instance - * @returns {Object.} JSON object - */ - MethodDescriptorProto.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for MethodDescriptorProto - * @function getTypeUrl - * @memberof google.protobuf.MethodDescriptorProto - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - MethodDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.MethodDescriptorProto"; - }; - - return MethodDescriptorProto; - })(); - - protobuf.FileOptions = (function() { - - /** - * Properties of a FileOptions. - * @memberof google.protobuf - * @interface IFileOptions - * @property {string|null} [javaPackage] FileOptions javaPackage - * @property {string|null} [javaOuterClassname] FileOptions javaOuterClassname - * @property {boolean|null} [javaMultipleFiles] FileOptions javaMultipleFiles - * @property {boolean|null} [javaGenerateEqualsAndHash] FileOptions javaGenerateEqualsAndHash - * @property {boolean|null} [javaStringCheckUtf8] FileOptions javaStringCheckUtf8 - * @property {google.protobuf.FileOptions.OptimizeMode|null} [optimizeFor] FileOptions optimizeFor - * @property {string|null} [goPackage] FileOptions goPackage - * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices - * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices - * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices - * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices - * @property {boolean|null} [deprecated] FileOptions deprecated - * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas - * 
@property {string|null} [objcClassPrefix] FileOptions objcClassPrefix - * @property {string|null} [csharpNamespace] FileOptions csharpNamespace - * @property {string|null} [swiftPrefix] FileOptions swiftPrefix - * @property {string|null} [phpClassPrefix] FileOptions phpClassPrefix - * @property {string|null} [phpNamespace] FileOptions phpNamespace - * @property {string|null} [phpMetadataNamespace] FileOptions phpMetadataNamespace - * @property {string|null} [rubyPackage] FileOptions rubyPackage - * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption - * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition - */ - - /** - * Constructs a new FileOptions. - * @memberof google.protobuf - * @classdesc Represents a FileOptions. - * @implements IFileOptions - * @constructor - * @param {google.protobuf.IFileOptions=} [properties] Properties to set - */ - function FileOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.resourceDefinition"] = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FileOptions javaPackage. - * @member {string} javaPackage - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaPackage = ""; - - /** - * FileOptions javaOuterClassname. - * @member {string} javaOuterClassname - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaOuterClassname = ""; - - /** - * FileOptions javaMultipleFiles. - * @member {boolean} javaMultipleFiles - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaMultipleFiles = false; - - /** - * FileOptions javaGenerateEqualsAndHash. 
- * @member {boolean} javaGenerateEqualsAndHash - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaGenerateEqualsAndHash = false; - - /** - * FileOptions javaStringCheckUtf8. - * @member {boolean} javaStringCheckUtf8 - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaStringCheckUtf8 = false; - - /** - * FileOptions optimizeFor. - * @member {google.protobuf.FileOptions.OptimizeMode} optimizeFor - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.optimizeFor = 1; - - /** - * FileOptions goPackage. - * @member {string} goPackage - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.goPackage = ""; - - /** - * FileOptions ccGenericServices. - * @member {boolean} ccGenericServices - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.ccGenericServices = false; - - /** - * FileOptions javaGenericServices. - * @member {boolean} javaGenericServices - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.javaGenericServices = false; - - /** - * FileOptions pyGenericServices. - * @member {boolean} pyGenericServices - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.pyGenericServices = false; - - /** - * FileOptions phpGenericServices. - * @member {boolean} phpGenericServices - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpGenericServices = false; - - /** - * FileOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.deprecated = false; - - /** - * FileOptions ccEnableArenas. - * @member {boolean} ccEnableArenas - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.ccEnableArenas = true; - - /** - * FileOptions objcClassPrefix. 
- * @member {string} objcClassPrefix - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.objcClassPrefix = ""; - - /** - * FileOptions csharpNamespace. - * @member {string} csharpNamespace - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.csharpNamespace = ""; - - /** - * FileOptions swiftPrefix. - * @member {string} swiftPrefix - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.swiftPrefix = ""; - - /** - * FileOptions phpClassPrefix. - * @member {string} phpClassPrefix - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpClassPrefix = ""; - - /** - * FileOptions phpNamespace. - * @member {string} phpNamespace - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpNamespace = ""; - - /** - * FileOptions phpMetadataNamespace. - * @member {string} phpMetadataNamespace - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpMetadataNamespace = ""; - - /** - * FileOptions rubyPackage. - * @member {string} rubyPackage - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.rubyPackage = ""; - - /** - * FileOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * FileOptions .google.api.resourceDefinition. - * @member {Array.} .google.api.resourceDefinition - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype[".google.api.resourceDefinition"] = $util.emptyArray; - - /** - * Creates a new FileOptions instance using the specified properties. 
- * @function create - * @memberof google.protobuf.FileOptions - * @static - * @param {google.protobuf.IFileOptions=} [properties] Properties to set - * @returns {google.protobuf.FileOptions} FileOptions instance - */ - FileOptions.create = function create(properties) { - return new FileOptions(properties); - }; - - /** - * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FileOptions - * @static - * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); - if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) - writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); - if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) - writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); - if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) - writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); - if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) - writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); - if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) - writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); - if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) - writer.uint32(/* id 17, wireType 0 
=*/136).bool(message.javaGenericServices); - if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, "pyGenericServices")) - writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); - if (message.javaGenerateEqualsAndHash != null && Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) - writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); - if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) - writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); - if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) - writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); - if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) - writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); - if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) - writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); - if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) - writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); - if (message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) - writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); - if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) - writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); - if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) - writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); - if 
(message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) - writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); - if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) - writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resourceDefinition"] != null && message[".google.api.resourceDefinition"].length) - for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) - $root.google.api.ResourceDescriptor.encode(message[".google.api.resourceDefinition"][i], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FileOptions - * @static - * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FileOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FileOptions message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.FileOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FileOptions} FileOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.javaPackage = reader.string(); - break; - } - case 8: { - message.javaOuterClassname = reader.string(); - break; - } - case 10: { - message.javaMultipleFiles = reader.bool(); - break; - } - case 20: { - message.javaGenerateEqualsAndHash = reader.bool(); - break; - } - case 27: { - message.javaStringCheckUtf8 = reader.bool(); - break; - } - case 9: { - message.optimizeFor = reader.int32(); - break; - } - case 11: { - message.goPackage = reader.string(); - break; - } - case 16: { - message.ccGenericServices = reader.bool(); - break; - } - case 17: { - message.javaGenericServices = reader.bool(); - break; - } - case 18: { - message.pyGenericServices = reader.bool(); - break; - } - case 42: { - message.phpGenericServices = reader.bool(); - break; - } - case 23: { - message.deprecated = reader.bool(); - break; - } - case 31: { - message.ccEnableArenas = reader.bool(); - break; - } - case 36: { - message.objcClassPrefix = reader.string(); - break; - } - case 37: { - message.csharpNamespace = reader.string(); - break; - } - case 39: { - message.swiftPrefix = reader.string(); - break; - } - case 40: { - message.phpClassPrefix = reader.string(); - break; - } - case 41: { - message.phpNamespace = reader.string(); - 
break; - } - case 44: { - message.phpMetadataNamespace = reader.string(); - break; - } - case 45: { - message.rubyPackage = reader.string(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - case 1053: { - if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) - message[".google.api.resourceDefinition"] = []; - message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FileOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FileOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FileOptions} FileOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FileOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FileOptions message. 
- * @function verify - * @memberof google.protobuf.FileOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FileOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) - if (!$util.isString(message.javaPackage)) - return "javaPackage: string expected"; - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) - if (!$util.isString(message.javaOuterClassname)) - return "javaOuterClassname: string expected"; - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) - if (typeof message.javaMultipleFiles !== "boolean") - return "javaMultipleFiles: boolean expected"; - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) - if (typeof message.javaGenerateEqualsAndHash !== "boolean") - return "javaGenerateEqualsAndHash: boolean expected"; - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) - if (typeof message.javaStringCheckUtf8 !== "boolean") - return "javaStringCheckUtf8: boolean expected"; - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) - switch (message.optimizeFor) { - default: - return "optimizeFor: enum value expected"; - case 1: - case 2: - case 3: - break; - } - if (message.goPackage != null && message.hasOwnProperty("goPackage")) - if (!$util.isString(message.goPackage)) - return "goPackage: string expected"; - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) - if (typeof message.ccGenericServices !== "boolean") - return "ccGenericServices: boolean expected"; - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) - if (typeof message.javaGenericServices !== 
"boolean") - return "javaGenericServices: boolean expected"; - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) - if (typeof message.pyGenericServices !== "boolean") - return "pyGenericServices: boolean expected"; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - if (typeof message.phpGenericServices !== "boolean") - return "phpGenericServices: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) - if (typeof message.ccEnableArenas !== "boolean") - return "ccEnableArenas: boolean expected"; - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) - if (!$util.isString(message.objcClassPrefix)) - return "objcClassPrefix: string expected"; - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) - if (!$util.isString(message.csharpNamespace)) - return "csharpNamespace: string expected"; - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) - if (!$util.isString(message.swiftPrefix)) - return "swiftPrefix: string expected"; - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) - if (!$util.isString(message.phpClassPrefix)) - return "phpClassPrefix: string expected"; - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) - if (!$util.isString(message.phpNamespace)) - return "phpNamespace: string expected"; - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) - if (!$util.isString(message.phpMetadataNamespace)) - return "phpMetadataNamespace: string expected"; - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) - if (!$util.isString(message.rubyPackage)) - return "rubyPackage: string expected"; - 
if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.resourceDefinition"] != null && message.hasOwnProperty(".google.api.resourceDefinition")) { - if (!Array.isArray(message[".google.api.resourceDefinition"])) - return ".google.api.resourceDefinition: array expected"; - for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) { - var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resourceDefinition"][i]); - if (error) - return ".google.api.resourceDefinition." + error; - } - } - return null; - }; - - /** - * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FileOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FileOptions} FileOptions - */ - FileOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FileOptions) - return object; - var message = new $root.google.protobuf.FileOptions(); - if (object.javaPackage != null) - message.javaPackage = String(object.javaPackage); - if (object.javaOuterClassname != null) - message.javaOuterClassname = String(object.javaOuterClassname); - if (object.javaMultipleFiles != null) - message.javaMultipleFiles = Boolean(object.javaMultipleFiles); - if (object.javaGenerateEqualsAndHash != null) - message.javaGenerateEqualsAndHash = Boolean(object.javaGenerateEqualsAndHash); - if (object.javaStringCheckUtf8 != null) - message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); - switch (object.optimizeFor) { - default: - if (typeof object.optimizeFor === "number") { - message.optimizeFor = object.optimizeFor; - break; - } - break; - case "SPEED": - case 1: - message.optimizeFor = 1; - break; - case "CODE_SIZE": - case 2: - message.optimizeFor = 2; - break; - case "LITE_RUNTIME": - case 3: - message.optimizeFor = 3; - break; - } - if (object.goPackage != null) - message.goPackage = String(object.goPackage); - if (object.ccGenericServices != null) - message.ccGenericServices = Boolean(object.ccGenericServices); - if (object.javaGenericServices != null) - message.javaGenericServices = Boolean(object.javaGenericServices); - if (object.pyGenericServices != null) - message.pyGenericServices = Boolean(object.pyGenericServices); - if (object.phpGenericServices != null) - message.phpGenericServices = Boolean(object.phpGenericServices); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.ccEnableArenas != null) - message.ccEnableArenas = Boolean(object.ccEnableArenas); - if (object.objcClassPrefix != null) - 
message.objcClassPrefix = String(object.objcClassPrefix); - if (object.csharpNamespace != null) - message.csharpNamespace = String(object.csharpNamespace); - if (object.swiftPrefix != null) - message.swiftPrefix = String(object.swiftPrefix); - if (object.phpClassPrefix != null) - message.phpClassPrefix = String(object.phpClassPrefix); - if (object.phpNamespace != null) - message.phpNamespace = String(object.phpNamespace); - if (object.phpMetadataNamespace != null) - message.phpMetadataNamespace = String(object.phpMetadataNamespace); - if (object.rubyPackage != null) - message.rubyPackage = String(object.rubyPackage); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.resourceDefinition"]) { - if (!Array.isArray(object[".google.api.resourceDefinition"])) - throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: array expected"); - message[".google.api.resourceDefinition"] = []; - for (var i = 0; i < object[".google.api.resourceDefinition"].length; ++i) { - if (typeof object[".google.api.resourceDefinition"][i] !== "object") - throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: object expected"); - message[".google.api.resourceDefinition"][i] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resourceDefinition"][i]); - } - } - return message; - }; - - /** - * Creates a plain object from a FileOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.FileOptions - * @static - * @param {google.protobuf.FileOptions} message FileOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FileOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.resourceDefinition"] = []; - } - if (options.defaults) { - object.javaPackage = ""; - object.javaOuterClassname = ""; - object.optimizeFor = options.enums === String ? "SPEED" : 1; - object.javaMultipleFiles = false; - object.goPackage = ""; - object.ccGenericServices = false; - object.javaGenericServices = false; - object.pyGenericServices = false; - object.javaGenerateEqualsAndHash = false; - object.deprecated = false; - object.javaStringCheckUtf8 = false; - object.ccEnableArenas = true; - object.objcClassPrefix = ""; - object.csharpNamespace = ""; - object.swiftPrefix = ""; - object.phpClassPrefix = ""; - object.phpNamespace = ""; - object.phpGenericServices = false; - object.phpMetadataNamespace = ""; - object.rubyPackage = ""; - } - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) - object.javaPackage = message.javaPackage; - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) - object.javaOuterClassname = message.javaOuterClassname; - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) - object.optimizeFor = options.enums === String ? $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] === undefined ? 
message.optimizeFor : $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) - object.javaMultipleFiles = message.javaMultipleFiles; - if (message.goPackage != null && message.hasOwnProperty("goPackage")) - object.goPackage = message.goPackage; - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) - object.ccGenericServices = message.ccGenericServices; - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) - object.javaGenericServices = message.javaGenericServices; - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) - object.pyGenericServices = message.pyGenericServices; - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) - object.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) - object.javaStringCheckUtf8 = message.javaStringCheckUtf8; - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) - object.ccEnableArenas = message.ccEnableArenas; - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) - object.objcClassPrefix = message.objcClassPrefix; - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) - object.csharpNamespace = message.csharpNamespace; - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) - object.swiftPrefix = message.swiftPrefix; - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) - object.phpClassPrefix = message.phpClassPrefix; - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) - 
object.phpNamespace = message.phpNamespace; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - object.phpGenericServices = message.phpGenericServices; - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) - object.phpMetadataNamespace = message.phpMetadataNamespace; - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) - object.rubyPackage = message.rubyPackage; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length) { - object[".google.api.resourceDefinition"] = []; - for (var j = 0; j < message[".google.api.resourceDefinition"].length; ++j) - object[".google.api.resourceDefinition"][j] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resourceDefinition"][j], options); - } - return object; - }; - - /** - * Converts this FileOptions to JSON. - * @function toJSON - * @memberof google.protobuf.FileOptions - * @instance - * @returns {Object.} JSON object - */ - FileOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FileOptions - * @function getTypeUrl - * @memberof google.protobuf.FileOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FileOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FileOptions"; - }; - - /** - * OptimizeMode enum. 
- * @name google.protobuf.FileOptions.OptimizeMode - * @enum {number} - * @property {number} SPEED=1 SPEED value - * @property {number} CODE_SIZE=2 CODE_SIZE value - * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value - */ - FileOptions.OptimizeMode = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[1] = "SPEED"] = 1; - values[valuesById[2] = "CODE_SIZE"] = 2; - values[valuesById[3] = "LITE_RUNTIME"] = 3; - return values; - })(); - - return FileOptions; - })(); - - protobuf.MessageOptions = (function() { - - /** - * Properties of a MessageOptions. - * @memberof google.protobuf - * @interface IMessageOptions - * @property {boolean|null} [messageSetWireFormat] MessageOptions messageSetWireFormat - * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor - * @property {boolean|null} [deprecated] MessageOptions deprecated - * @property {boolean|null} [mapEntry] MessageOptions mapEntry - * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption - * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource - */ - - /** - * Constructs a new MessageOptions. - * @memberof google.protobuf - * @classdesc Represents a MessageOptions. - * @implements IMessageOptions - * @constructor - * @param {google.protobuf.IMessageOptions=} [properties] Properties to set - */ - function MessageOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * MessageOptions messageSetWireFormat. - * @member {boolean} messageSetWireFormat - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype.messageSetWireFormat = false; - - /** - * MessageOptions noStandardDescriptorAccessor. 
- * @member {boolean} noStandardDescriptorAccessor - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype.noStandardDescriptorAccessor = false; - - /** - * MessageOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype.deprecated = false; - - /** - * MessageOptions mapEntry. - * @member {boolean} mapEntry - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype.mapEntry = false; - - /** - * MessageOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * MessageOptions .google.api.resource. - * @member {google.api.IResourceDescriptor|null|undefined} .google.api.resource - * @memberof google.protobuf.MessageOptions - * @instance - */ - MessageOptions.prototype[".google.api.resource"] = null; - - /** - * Creates a new MessageOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.MessageOptions - * @static - * @param {google.protobuf.IMessageOptions=} [properties] Properties to set - * @returns {google.protobuf.MessageOptions} MessageOptions instance - */ - MessageOptions.create = function create(properties) { - return new MessageOptions(properties); - }; - - /** - * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.MessageOptions - * @static - * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MessageOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); - if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) - writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, ".google.api.resource")) - $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.MessageOptions - * @static - * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MessageOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a MessageOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.MessageOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MessageOptions} MessageOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MessageOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.messageSetWireFormat = reader.bool(); - break; - } - case 2: { - message.noStandardDescriptorAccessor = reader.bool(); - break; - } - case 3: { - message.deprecated = reader.bool(); - break; - } - case 7: { - message.mapEntry = reader.bool(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - case 1053: { - message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a MessageOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.MessageOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MessageOptions} MessageOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MessageOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a MessageOptions message. 
- * @function verify - * @memberof google.protobuf.MessageOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - MessageOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) - if (typeof message.messageSetWireFormat !== "boolean") - return "messageSetWireFormat: boolean expected"; - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) - if (typeof message.noStandardDescriptorAccessor !== "boolean") - return "noStandardDescriptorAccessor: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) - if (typeof message.mapEntry !== "boolean") - return "mapEntry: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) { - var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resource"]); - if (error) - return ".google.api.resource." + error; - } - return null; - }; - - /** - * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.MessageOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.MessageOptions} MessageOptions - */ - MessageOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MessageOptions) - return object; - var message = new $root.google.protobuf.MessageOptions(); - if (object.messageSetWireFormat != null) - message.messageSetWireFormat = Boolean(object.messageSetWireFormat); - if (object.noStandardDescriptorAccessor != null) - message.noStandardDescriptorAccessor = Boolean(object.noStandardDescriptorAccessor); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.mapEntry != null) - message.mapEntry = Boolean(object.mapEntry); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.resource"] != null) { - if (typeof object[".google.api.resource"] !== "object") - throw TypeError(".google.protobuf.MessageOptions..google.api.resource: object expected"); - message[".google.api.resource"] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resource"]); - } - return message; - }; - - /** - * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.MessageOptions - * @static - * @param {google.protobuf.MessageOptions} message MessageOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - MessageOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.messageSetWireFormat = false; - object.noStandardDescriptorAccessor = false; - object.deprecated = false; - object.mapEntry = false; - object[".google.api.resource"] = null; - } - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) - object.messageSetWireFormat = message.messageSetWireFormat; - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) - object.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) - object.mapEntry = message.mapEntry; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) - object[".google.api.resource"] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resource"], options); - return object; - }; - - /** - * Converts this MessageOptions to JSON. 
- * @function toJSON - * @memberof google.protobuf.MessageOptions - * @instance - * @returns {Object.} JSON object - */ - MessageOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for MessageOptions - * @function getTypeUrl - * @memberof google.protobuf.MessageOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - MessageOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.MessageOptions"; - }; - - return MessageOptions; - })(); - - protobuf.FieldOptions = (function() { - - /** - * Properties of a FieldOptions. - * @memberof google.protobuf - * @interface IFieldOptions - * @property {google.protobuf.FieldOptions.CType|null} [ctype] FieldOptions ctype - * @property {boolean|null} [packed] FieldOptions packed - * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype - * @property {boolean|null} [lazy] FieldOptions lazy - * @property {boolean|null} [unverifiedLazy] FieldOptions unverifiedLazy - * @property {boolean|null} [deprecated] FieldOptions deprecated - * @property {boolean|null} [weak] FieldOptions weak - * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption - * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName - * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior - * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference - */ - - /** - * Constructs a new FieldOptions. - * @memberof google.protobuf - * @classdesc Represents a FieldOptions. 
- * @implements IFieldOptions - * @constructor - * @param {google.protobuf.IFieldOptions=} [properties] Properties to set - */ - function FieldOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.fieldBehavior"] = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FieldOptions ctype. - * @member {google.protobuf.FieldOptions.CType} ctype - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.ctype = 0; - - /** - * FieldOptions packed. - * @member {boolean} packed - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.packed = false; - - /** - * FieldOptions jstype. - * @member {google.protobuf.FieldOptions.JSType} jstype - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.jstype = 0; - - /** - * FieldOptions lazy. - * @member {boolean} lazy - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.lazy = false; - - /** - * FieldOptions unverifiedLazy. - * @member {boolean} unverifiedLazy - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.unverifiedLazy = false; - - /** - * FieldOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.deprecated = false; - - /** - * FieldOptions weak. - * @member {boolean} weak - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.weak = false; - - /** - * FieldOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * FieldOptions .google.cloud.bigquery.storage.v1.columnName. 
- * @member {string} .google.cloud.bigquery.storage.v1.columnName - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype[".google.cloud.bigquery.storage.v1.columnName"] = null; - - /** - * FieldOptions .google.api.fieldBehavior. - * @member {Array.} .google.api.fieldBehavior - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype[".google.api.fieldBehavior"] = $util.emptyArray; - - /** - * FieldOptions .google.api.resourceReference. - * @member {google.api.IResourceReference|null|undefined} .google.api.resourceReference - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype[".google.api.resourceReference"] = null; - - /** - * Creates a new FieldOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.FieldOptions - * @static - * @param {google.protobuf.IFieldOptions=} [properties] Properties to set - * @returns {google.protobuf.FieldOptions} FieldOptions instance - */ - FieldOptions.create = function create(properties) { - return new FieldOptions(properties); - }; - - /** - * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.FieldOptions - * @static - * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FieldOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); - if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) - writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); - if (message.jstype != null && Object.hasOwnProperty.call(message, "jstype")) - writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); - if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) - writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); - if (message.unverifiedLazy != null && Object.hasOwnProperty.call(message, "unverifiedLazy")) - writer.uint32(/* id 15, wireType 0 =*/120).bool(message.unverifiedLazy); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { - writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); - for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) - writer.int32(message[".google.api.fieldBehavior"][i]); - 
writer.ldelim(); - } - if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) - $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); - if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && Object.hasOwnProperty.call(message, ".google.cloud.bigquery.storage.v1.columnName")) - writer.uint32(/* id 454943157, wireType 2 =*/3639545258).string(message[".google.cloud.bigquery.storage.v1.columnName"]); - return writer; - }; - - /** - * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FieldOptions - * @static - * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FieldOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FieldOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.FieldOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FieldOptions} FieldOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FieldOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.ctype = reader.int32(); - break; - } - case 2: { - message.packed = reader.bool(); - break; - } - case 6: { - message.jstype = reader.int32(); - break; - } - case 5: { - message.lazy = reader.bool(); - break; - } - case 15: { - message.unverifiedLazy = reader.bool(); - break; - } - case 3: { - message.deprecated = reader.bool(); - break; - } - case 10: { - message.weak = reader.bool(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - case 454943157: { - message[".google.cloud.bigquery.storage.v1.columnName"] = reader.string(); - break; - } - case 1052: { - if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) - message[".google.api.fieldBehavior"] = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message[".google.api.fieldBehavior"].push(reader.int32()); - } else - message[".google.api.fieldBehavior"].push(reader.int32()); - break; - } - case 1055: { - message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FieldOptions message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.FieldOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FieldOptions} FieldOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FieldOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FieldOptions message. - * @function verify - * @memberof google.protobuf.FieldOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FieldOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.ctype != null && message.hasOwnProperty("ctype")) - switch (message.ctype) { - default: - return "ctype: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.packed != null && message.hasOwnProperty("packed")) - if (typeof message.packed !== "boolean") - return "packed: boolean expected"; - if (message.jstype != null && message.hasOwnProperty("jstype")) - switch (message.jstype) { - default: - return "jstype: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.lazy != null && message.hasOwnProperty("lazy")) - if (typeof message.lazy !== "boolean") - return "lazy: boolean expected"; - if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) - if (typeof message.unverifiedLazy !== "boolean") - return "unverifiedLazy: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.weak != null && 
message.hasOwnProperty("weak")) - if (typeof message.weak !== "boolean") - return "weak: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) - if (!$util.isString(message[".google.cloud.bigquery.storage.v1.columnName"])) - return ".google.cloud.bigquery.storage.v1.columnName: string expected"; - if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { - if (!Array.isArray(message[".google.api.fieldBehavior"])) - return ".google.api.fieldBehavior: array expected"; - for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) - switch (message[".google.api.fieldBehavior"][i]) { - default: - return ".google.api.fieldBehavior: enum value[] expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - break; - } - } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) { - var error = $root.google.api.ResourceReference.verify(message[".google.api.resourceReference"]); - if (error) - return ".google.api.resourceReference." + error; - } - return null; - }; - - /** - * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FieldOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FieldOptions} FieldOptions - */ - FieldOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FieldOptions) - return object; - var message = new $root.google.protobuf.FieldOptions(); - switch (object.ctype) { - default: - if (typeof object.ctype === "number") { - message.ctype = object.ctype; - break; - } - break; - case "STRING": - case 0: - message.ctype = 0; - break; - case "CORD": - case 1: - message.ctype = 1; - break; - case "STRING_PIECE": - case 2: - message.ctype = 2; - break; - } - if (object.packed != null) - message.packed = Boolean(object.packed); - switch (object.jstype) { - default: - if (typeof object.jstype === "number") { - message.jstype = object.jstype; - break; - } - break; - case "JS_NORMAL": - case 0: - message.jstype = 0; - break; - case "JS_STRING": - case 1: - message.jstype = 1; - break; - case "JS_NUMBER": - case 2: - message.jstype = 2; - break; - } - if (object.lazy != null) - message.lazy = Boolean(object.lazy); - if (object.unverifiedLazy != null) - message.unverifiedLazy = Boolean(object.unverifiedLazy); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.weak != null) - message.weak = Boolean(object.weak); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if 
(object[".google.cloud.bigquery.storage.v1.columnName"] != null) - message[".google.cloud.bigquery.storage.v1.columnName"] = String(object[".google.cloud.bigquery.storage.v1.columnName"]); - if (object[".google.api.fieldBehavior"]) { - if (!Array.isArray(object[".google.api.fieldBehavior"])) - throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); - message[".google.api.fieldBehavior"] = []; - for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) - switch (object[".google.api.fieldBehavior"][i]) { - default: - if (typeof object[".google.api.fieldBehavior"][i] === "number") { - message[".google.api.fieldBehavior"][i] = object[".google.api.fieldBehavior"][i]; - break; - } - case "FIELD_BEHAVIOR_UNSPECIFIED": - case 0: - message[".google.api.fieldBehavior"][i] = 0; - break; - case "OPTIONAL": - case 1: - message[".google.api.fieldBehavior"][i] = 1; - break; - case "REQUIRED": - case 2: - message[".google.api.fieldBehavior"][i] = 2; - break; - case "OUTPUT_ONLY": - case 3: - message[".google.api.fieldBehavior"][i] = 3; - break; - case "INPUT_ONLY": - case 4: - message[".google.api.fieldBehavior"][i] = 4; - break; - case "IMMUTABLE": - case 5: - message[".google.api.fieldBehavior"][i] = 5; - break; - case "UNORDERED_LIST": - case 6: - message[".google.api.fieldBehavior"][i] = 6; - break; - case "NON_EMPTY_DEFAULT": - case 7: - message[".google.api.fieldBehavior"][i] = 7; - break; - } - } - if (object[".google.api.resourceReference"] != null) { - if (typeof object[".google.api.resourceReference"] !== "object") - throw TypeError(".google.protobuf.FieldOptions..google.api.resourceReference: object expected"); - message[".google.api.resourceReference"] = $root.google.api.ResourceReference.fromObject(object[".google.api.resourceReference"]); - } - return message; - }; - - /** - * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.FieldOptions - * @static - * @param {google.protobuf.FieldOptions} message FieldOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FieldOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.fieldBehavior"] = []; - } - if (options.defaults) { - object.ctype = options.enums === String ? "STRING" : 0; - object.packed = false; - object.deprecated = false; - object.lazy = false; - object.jstype = options.enums === String ? "JS_NORMAL" : 0; - object.weak = false; - object.unverifiedLazy = false; - object[".google.api.resourceReference"] = null; - object[".google.cloud.bigquery.storage.v1.columnName"] = null; - } - if (message.ctype != null && message.hasOwnProperty("ctype")) - object.ctype = options.enums === String ? $root.google.protobuf.FieldOptions.CType[message.ctype] === undefined ? message.ctype : $root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; - if (message.packed != null && message.hasOwnProperty("packed")) - object.packed = message.packed; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.lazy != null && message.hasOwnProperty("lazy")) - object.lazy = message.lazy; - if (message.jstype != null && message.hasOwnProperty("jstype")) - object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] === undefined ? 
message.jstype : $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; - if (message.weak != null && message.hasOwnProperty("weak")) - object.weak = message.weak; - if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) - object.unverifiedLazy = message.unverifiedLazy; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { - object[".google.api.fieldBehavior"] = []; - for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) - object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] === undefined ? message[".google.api.fieldBehavior"][j] : $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; - } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) - object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); - if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) - object[".google.cloud.bigquery.storage.v1.columnName"] = message[".google.cloud.bigquery.storage.v1.columnName"]; - return object; - }; - - /** - * Converts this FieldOptions to JSON. 
- * @function toJSON - * @memberof google.protobuf.FieldOptions - * @instance - * @returns {Object.} JSON object - */ - FieldOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FieldOptions - * @function getTypeUrl - * @memberof google.protobuf.FieldOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FieldOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FieldOptions"; - }; - - /** - * CType enum. - * @name google.protobuf.FieldOptions.CType - * @enum {number} - * @property {number} STRING=0 STRING value - * @property {number} CORD=1 CORD value - * @property {number} STRING_PIECE=2 STRING_PIECE value - */ - FieldOptions.CType = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STRING"] = 0; - values[valuesById[1] = "CORD"] = 1; - values[valuesById[2] = "STRING_PIECE"] = 2; - return values; - })(); - - /** - * JSType enum. - * @name google.protobuf.FieldOptions.JSType - * @enum {number} - * @property {number} JS_NORMAL=0 JS_NORMAL value - * @property {number} JS_STRING=1 JS_STRING value - * @property {number} JS_NUMBER=2 JS_NUMBER value - */ - FieldOptions.JSType = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "JS_NORMAL"] = 0; - values[valuesById[1] = "JS_STRING"] = 1; - values[valuesById[2] = "JS_NUMBER"] = 2; - return values; - })(); - - return FieldOptions; - })(); - - protobuf.OneofOptions = (function() { - - /** - * Properties of an OneofOptions. 
- * @memberof google.protobuf - * @interface IOneofOptions - * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption - */ - - /** - * Constructs a new OneofOptions. - * @memberof google.protobuf - * @classdesc Represents an OneofOptions. - * @implements IOneofOptions - * @constructor - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set - */ - function OneofOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * OneofOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.OneofOptions - * @instance - */ - OneofOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * Creates a new OneofOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set - * @returns {google.protobuf.OneofOptions} OneofOptions instance - */ - OneofOptions.create = function create(properties) { - return new OneofOptions(properties); - }; - - /** - * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an OneofOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.OneofOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.OneofOptions} OneofOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - OneofOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an OneofOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.OneofOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.OneofOptions} OneofOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - OneofOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an OneofOptions message. - * @function verify - * @memberof google.protobuf.OneofOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - OneofOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." 
+ error; - } - } - return null; - }; - - /** - * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.OneofOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.OneofOptions} OneofOptions - */ - OneofOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.OneofOptions) - return object; - var message = new $root.google.protobuf.OneofOptions(); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - return message; - }; - - /** - * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.OneofOptions} message OneofOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - OneofOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - return object; - }; - - /** - * Converts this OneofOptions to JSON. - * @function toJSON - * @memberof google.protobuf.OneofOptions - * @instance - * @returns {Object.} JSON object - */ - OneofOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for OneofOptions - * @function getTypeUrl - * @memberof google.protobuf.OneofOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - OneofOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.OneofOptions"; - }; - - return OneofOptions; - })(); - - protobuf.EnumOptions = (function() { - - /** - * Properties of an EnumOptions. - * @memberof google.protobuf - * @interface IEnumOptions - * @property {boolean|null} [allowAlias] EnumOptions allowAlias - * @property {boolean|null} [deprecated] EnumOptions deprecated - * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption - */ - - /** - * Constructs a new EnumOptions. 
- * @memberof google.protobuf - * @classdesc Represents an EnumOptions. - * @implements IEnumOptions - * @constructor - * @param {google.protobuf.IEnumOptions=} [properties] Properties to set - */ - function EnumOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumOptions allowAlias. - * @member {boolean} allowAlias - * @memberof google.protobuf.EnumOptions - * @instance - */ - EnumOptions.prototype.allowAlias = false; - - /** - * EnumOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.EnumOptions - * @instance - */ - EnumOptions.prototype.deprecated = false; - - /** - * EnumOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.EnumOptions - * @instance - */ - EnumOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * Creates a new EnumOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumOptions - * @static - * @param {google.protobuf.IEnumOptions=} [properties] Properties to set - * @returns {google.protobuf.EnumOptions} EnumOptions instance - */ - EnumOptions.create = function create(properties) { - return new EnumOptions(properties); - }; - - /** - * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.EnumOptions - * @static - * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.EnumOptions - * @static - * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumOptions message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.EnumOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumOptions} EnumOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - message.allowAlias = reader.bool(); - break; - } - case 3: { - message.deprecated = reader.bool(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an EnumOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.EnumOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumOptions} EnumOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an EnumOptions message. 
- * @function verify - * @memberof google.protobuf.EnumOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) - if (typeof message.allowAlias !== "boolean") - return "allowAlias: boolean expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - return null; - }; - - /** - * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumOptions} EnumOptions - */ - EnumOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumOptions) - return object; - var message = new $root.google.protobuf.EnumOptions(); - if (object.allowAlias != null) - message.allowAlias = Boolean(object.allowAlias); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - return message; - }; - - /** - * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.EnumOptions - * @static - * @param {google.protobuf.EnumOptions} message EnumOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.allowAlias = false; - object.deprecated = false; - } - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) - object.allowAlias = message.allowAlias; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - return object; - }; - - /** - * Converts this EnumOptions to JSON. - * @function toJSON - * @memberof google.protobuf.EnumOptions - * @instance - * @returns {Object.} JSON object - */ - EnumOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for EnumOptions - * @function getTypeUrl - * @memberof google.protobuf.EnumOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - EnumOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.EnumOptions"; - }; - - return EnumOptions; - })(); - - protobuf.EnumValueOptions = (function() { - - /** - * Properties of an EnumValueOptions. 
- * @memberof google.protobuf - * @interface IEnumValueOptions - * @property {boolean|null} [deprecated] EnumValueOptions deprecated - * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption - */ - - /** - * Constructs a new EnumValueOptions. - * @memberof google.protobuf - * @classdesc Represents an EnumValueOptions. - * @implements IEnumValueOptions - * @constructor - * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set - */ - function EnumValueOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * EnumValueOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.EnumValueOptions - * @instance - */ - EnumValueOptions.prototype.deprecated = false; - - /** - * EnumValueOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.EnumValueOptions - * @instance - */ - EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * Creates a new EnumValueOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance - */ - EnumValueOptions.create = function create(properties) { - return new EnumValueOptions(properties); - }; - - /** - * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - EnumValueOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an EnumValueOptions message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.deprecated = reader.bool(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - EnumValueOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an EnumValueOptions message. 
- * @function verify - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - EnumValueOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - return null; - }; - - /** - * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.EnumValueOptions} EnumValueOptions - */ - EnumValueOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.EnumValueOptions) - return object; - var message = new $root.google.protobuf.EnumValueOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - return message; - }; - - /** - * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {google.protobuf.EnumValueOptions} message EnumValueOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - EnumValueOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) - object.deprecated = false; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - return object; - }; - - /** - * Converts this EnumValueOptions to JSON. - * @function toJSON - * @memberof google.protobuf.EnumValueOptions - * @instance - * @returns {Object.} JSON object - */ - EnumValueOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for EnumValueOptions - * @function getTypeUrl - * @memberof google.protobuf.EnumValueOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - EnumValueOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.EnumValueOptions"; - }; - - return EnumValueOptions; - })(); - - protobuf.ServiceOptions = (function() { - - /** - * Properties of a ServiceOptions. 
- * @memberof google.protobuf - * @interface IServiceOptions - * @property {boolean|null} [deprecated] ServiceOptions deprecated - * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption - * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost - * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes - */ - - /** - * Constructs a new ServiceOptions. - * @memberof google.protobuf - * @classdesc Represents a ServiceOptions. - * @implements IServiceOptions - * @constructor - * @param {google.protobuf.IServiceOptions=} [properties] Properties to set - */ - function ServiceOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ServiceOptions deprecated. - * @member {boolean} deprecated - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype.deprecated = false; - - /** - * ServiceOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * ServiceOptions .google.api.defaultHost. - * @member {string} .google.api.defaultHost - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype[".google.api.defaultHost"] = ""; - - /** - * ServiceOptions .google.api.oauthScopes. - * @member {string} .google.api.oauthScopes - * @memberof google.protobuf.ServiceOptions - * @instance - */ - ServiceOptions.prototype[".google.api.oauthScopes"] = ""; - - /** - * Creates a new ServiceOptions instance using the specified properties. 
- * @function create - * @memberof google.protobuf.ServiceOptions - * @static - * @param {google.protobuf.IServiceOptions=} [properties] Properties to set - * @returns {google.protobuf.ServiceOptions} ServiceOptions instance - */ - ServiceOptions.create = function create(properties) { - return new ServiceOptions(properties); - }; - - /** - * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. - * @function encode - * @memberof google.protobuf.ServiceOptions - * @static - * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ServiceOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) - writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); - if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) - writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); - return writer; - }; - - /** - * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.ServiceOptions - * @static - * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ServiceOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ServiceOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.ServiceOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.ServiceOptions} ServiceOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ServiceOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 33: { - message.deprecated = reader.bool(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - case 1049: { - message[".google.api.defaultHost"] = reader.string(); - break; - } - case 1050: { - message[".google.api.oauthScopes"] = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.ServiceOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.ServiceOptions} ServiceOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ServiceOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ServiceOptions message. - * @function verify - * @memberof google.protobuf.ServiceOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ServiceOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." 
+ error; - } - } - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) - if (!$util.isString(message[".google.api.defaultHost"])) - return ".google.api.defaultHost: string expected"; - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) - if (!$util.isString(message[".google.api.oauthScopes"])) - return ".google.api.oauthScopes: string expected"; - return null; - }; - - /** - * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.ServiceOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.ServiceOptions} ServiceOptions - */ - ServiceOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.ServiceOptions) - return object; - var message = new $root.google.protobuf.ServiceOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.defaultHost"] != null) - message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); - if (object[".google.api.oauthScopes"] != null) - message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); - return message; - }; - - /** - * Creates a plain object from a ServiceOptions message. 
Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.ServiceOptions - * @static - * @param {google.protobuf.ServiceOptions} message ServiceOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ServiceOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.uninterpretedOption = []; - if (options.defaults) { - object.deprecated = false; - object[".google.api.defaultHost"] = ""; - object[".google.api.oauthScopes"] = ""; - } - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) - object[".google.api.defaultHost"] = message[".google.api.defaultHost"]; - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) - object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; - return object; - }; - - /** - * Converts this ServiceOptions to JSON. 
- * @function toJSON - * @memberof google.protobuf.ServiceOptions - * @instance - * @returns {Object.} JSON object - */ - ServiceOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ServiceOptions - * @function getTypeUrl - * @memberof google.protobuf.ServiceOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ServiceOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.ServiceOptions"; - }; - - return ServiceOptions; - })(); - - protobuf.MethodOptions = (function() { - - /** - * Properties of a MethodOptions. - * @memberof google.protobuf - * @interface IMethodOptions - * @property {boolean|null} [deprecated] MethodOptions deprecated - * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel - * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption - * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http - * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature - */ - - /** - * Constructs a new MethodOptions. - * @memberof google.protobuf - * @classdesc Represents a MethodOptions. - * @implements IMethodOptions - * @constructor - * @param {google.protobuf.IMethodOptions=} [properties] Properties to set - */ - function MethodOptions(properties) { - this.uninterpretedOption = []; - this[".google.api.methodSignature"] = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * MethodOptions deprecated. 
- * @member {boolean} deprecated - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.deprecated = false; - - /** - * MethodOptions idempotencyLevel. - * @member {google.protobuf.MethodOptions.IdempotencyLevel} idempotencyLevel - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.idempotencyLevel = 0; - - /** - * MethodOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype.uninterpretedOption = $util.emptyArray; - - /** - * MethodOptions .google.api.http. - * @member {google.api.IHttpRule|null|undefined} .google.api.http - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype[".google.api.http"] = null; - - /** - * MethodOptions .google.api.methodSignature. - * @member {Array.} .google.api.methodSignature - * @memberof google.protobuf.MethodOptions - * @instance - */ - MethodOptions.prototype[".google.api.methodSignature"] = $util.emptyArray; - - /** - * Creates a new MethodOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.MethodOptions - * @static - * @param {google.protobuf.IMethodOptions=} [properties] Properties to set - * @returns {google.protobuf.MethodOptions} MethodOptions instance - */ - MethodOptions.create = function create(properties) { - return new MethodOptions(properties); - }; - - /** - * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.MethodOptions - * @static - * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MethodOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) - writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) - writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.methodSignature"] != null && message[".google.api.methodSignature"].length) - for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) - writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); - if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) - $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.MethodOptions - * @static - * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - MethodOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a MethodOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.MethodOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.MethodOptions} MethodOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MethodOptions.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 33: { - message.deprecated = reader.bool(); - break; - } - case 34: { - message.idempotencyLevel = reader.int32(); - break; - } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) - message.uninterpretedOption = []; - message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); - break; - } - case 72295728: { - message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); - break; - } - case 1051: { - if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) - message[".google.api.methodSignature"] = []; - message[".google.api.methodSignature"].push(reader.string()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a MethodOptions message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.MethodOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.MethodOptions} MethodOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - MethodOptions.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a MethodOptions message. 
- * @function verify - * @memberof google.protobuf.MethodOptions - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - MethodOptions.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - if (typeof message.deprecated !== "boolean") - return "deprecated: boolean expected"; - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) - switch (message.idempotencyLevel) { - default: - return "idempotencyLevel: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { - if (!Array.isArray(message.uninterpretedOption)) - return "uninterpretedOption: array expected"; - for (var i = 0; i < message.uninterpretedOption.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); - if (error) - return "uninterpretedOption." + error; - } - } - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) { - var error = $root.google.api.HttpRule.verify(message[".google.api.http"]); - if (error) - return ".google.api.http." + error; - } - if (message[".google.api.methodSignature"] != null && message.hasOwnProperty(".google.api.methodSignature")) { - if (!Array.isArray(message[".google.api.methodSignature"])) - return ".google.api.methodSignature: array expected"; - for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) - if (!$util.isString(message[".google.api.methodSignature"][i])) - return ".google.api.methodSignature: string[] expected"; - } - return null; - }; - - /** - * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.MethodOptions - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.MethodOptions} MethodOptions - */ - MethodOptions.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.MethodOptions) - return object; - var message = new $root.google.protobuf.MethodOptions(); - if (object.deprecated != null) - message.deprecated = Boolean(object.deprecated); - switch (object.idempotencyLevel) { - default: - if (typeof object.idempotencyLevel === "number") { - message.idempotencyLevel = object.idempotencyLevel; - break; - } - break; - case "IDEMPOTENCY_UNKNOWN": - case 0: - message.idempotencyLevel = 0; - break; - case "NO_SIDE_EFFECTS": - case 1: - message.idempotencyLevel = 1; - break; - case "IDEMPOTENT": - case 2: - message.idempotencyLevel = 2; - break; - } - if (object.uninterpretedOption) { - if (!Array.isArray(object.uninterpretedOption)) - throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); - message.uninterpretedOption = []; - for (var i = 0; i < object.uninterpretedOption.length; ++i) { - if (typeof object.uninterpretedOption[i] !== "object") - throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: object expected"); - message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); - } - } - if (object[".google.api.http"] != null) { - if (typeof object[".google.api.http"] !== "object") - throw TypeError(".google.protobuf.MethodOptions..google.api.http: object expected"); - message[".google.api.http"] = $root.google.api.HttpRule.fromObject(object[".google.api.http"]); - } - if (object[".google.api.methodSignature"]) { - if (!Array.isArray(object[".google.api.methodSignature"])) - throw TypeError(".google.protobuf.MethodOptions..google.api.methodSignature: array expected"); - message[".google.api.methodSignature"] = []; - for (var i = 0; i < 
object[".google.api.methodSignature"].length; ++i) - message[".google.api.methodSignature"][i] = String(object[".google.api.methodSignature"][i]); - } - return message; - }; - - /** - * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.MethodOptions - * @static - * @param {google.protobuf.MethodOptions} message MethodOptions - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - MethodOptions.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.uninterpretedOption = []; - object[".google.api.methodSignature"] = []; - } - if (options.defaults) { - object.deprecated = false; - object.idempotencyLevel = options.enums === String ? "IDEMPOTENCY_UNKNOWN" : 0; - object[".google.api.http"] = null; - } - if (message.deprecated != null && message.hasOwnProperty("deprecated")) - object.deprecated = message.deprecated; - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) - object.idempotencyLevel = options.enums === String ? $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] === undefined ? 
message.idempotencyLevel : $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; - if (message.uninterpretedOption && message.uninterpretedOption.length) { - object.uninterpretedOption = []; - for (var j = 0; j < message.uninterpretedOption.length; ++j) - object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); - } - if (message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length) { - object[".google.api.methodSignature"] = []; - for (var j = 0; j < message[".google.api.methodSignature"].length; ++j) - object[".google.api.methodSignature"][j] = message[".google.api.methodSignature"][j]; - } - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) - object[".google.api.http"] = $root.google.api.HttpRule.toObject(message[".google.api.http"], options); - return object; - }; - - /** - * Converts this MethodOptions to JSON. - * @function toJSON - * @memberof google.protobuf.MethodOptions - * @instance - * @returns {Object.} JSON object - */ - MethodOptions.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for MethodOptions - * @function getTypeUrl - * @memberof google.protobuf.MethodOptions - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - MethodOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.MethodOptions"; - }; - - /** - * IdempotencyLevel enum. 
- * @name google.protobuf.MethodOptions.IdempotencyLevel - * @enum {number} - * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value - * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value - * @property {number} IDEMPOTENT=2 IDEMPOTENT value - */ - MethodOptions.IdempotencyLevel = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "IDEMPOTENCY_UNKNOWN"] = 0; - values[valuesById[1] = "NO_SIDE_EFFECTS"] = 1; - values[valuesById[2] = "IDEMPOTENT"] = 2; - return values; - })(); - - return MethodOptions; - })(); - - protobuf.UninterpretedOption = (function() { - - /** - * Properties of an UninterpretedOption. - * @memberof google.protobuf - * @interface IUninterpretedOption - * @property {Array.|null} [name] UninterpretedOption name - * @property {string|null} [identifierValue] UninterpretedOption identifierValue - * @property {number|Long|null} [positiveIntValue] UninterpretedOption positiveIntValue - * @property {number|Long|null} [negativeIntValue] UninterpretedOption negativeIntValue - * @property {number|null} [doubleValue] UninterpretedOption doubleValue - * @property {Uint8Array|null} [stringValue] UninterpretedOption stringValue - * @property {string|null} [aggregateValue] UninterpretedOption aggregateValue - */ - - /** - * Constructs a new UninterpretedOption. - * @memberof google.protobuf - * @classdesc Represents an UninterpretedOption. - * @implements IUninterpretedOption - * @constructor - * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set - */ - function UninterpretedOption(properties) { - this.name = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * UninterpretedOption name. 
- * @member {Array.} name - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.name = $util.emptyArray; - - /** - * UninterpretedOption identifierValue. - * @member {string} identifierValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.identifierValue = ""; - - /** - * UninterpretedOption positiveIntValue. - * @member {number|Long} positiveIntValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.positiveIntValue = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * UninterpretedOption negativeIntValue. - * @member {number|Long} negativeIntValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.negativeIntValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * UninterpretedOption doubleValue. - * @member {number} doubleValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.doubleValue = 0; - - /** - * UninterpretedOption stringValue. - * @member {Uint8Array} stringValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.stringValue = $util.newBuffer([]); - - /** - * UninterpretedOption aggregateValue. - * @member {string} aggregateValue - * @memberof google.protobuf.UninterpretedOption - * @instance - */ - UninterpretedOption.prototype.aggregateValue = ""; - - /** - * Creates a new UninterpretedOption instance using the specified properties. 
- * @function create - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption instance - */ - UninterpretedOption.create = function create(properties) { - return new UninterpretedOption(properties); - }; - - /** - * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @function encode - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UninterpretedOption.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.name != null && message.name.length) - for (var i = 0; i < message.name.length; ++i) - $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); - if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) - writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); - if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) - writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); - if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) - writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); - if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) - writer.uint32(/* id 7, wireType 2 
=*/58).bytes(message.stringValue); - if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) - writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); - return writer; - }; - - /** - * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UninterpretedOption.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an UninterpretedOption message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UninterpretedOption.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - if (!(message.name && message.name.length)) - message.name = []; - message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); - break; - } - case 3: { - message.identifierValue = reader.string(); - break; - } - case 4: { - message.positiveIntValue = reader.uint64(); - break; - } - case 5: { - message.negativeIntValue = reader.int64(); - break; - } - case 6: { - message.doubleValue = reader.double(); - break; - } - case 7: { - message.stringValue = reader.bytes(); - break; - } - case 8: { - message.aggregateValue = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UninterpretedOption.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an UninterpretedOption message. 
- * @function verify - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - UninterpretedOption.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.name != null && message.hasOwnProperty("name")) { - if (!Array.isArray(message.name)) - return "name: array expected"; - for (var i = 0; i < message.name.length; ++i) { - var error = $root.google.protobuf.UninterpretedOption.NamePart.verify(message.name[i]); - if (error) - return "name." + error; - } - } - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) - if (!$util.isString(message.identifierValue)) - return "identifierValue: string expected"; - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) - if (!$util.isInteger(message.positiveIntValue) && !(message.positiveIntValue && $util.isInteger(message.positiveIntValue.low) && $util.isInteger(message.positiveIntValue.high))) - return "positiveIntValue: integer|Long expected"; - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) - if (!$util.isInteger(message.negativeIntValue) && !(message.negativeIntValue && $util.isInteger(message.negativeIntValue.low) && $util.isInteger(message.negativeIntValue.high))) - return "negativeIntValue: integer|Long expected"; - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) - if (typeof message.doubleValue !== "number") - return "doubleValue: number expected"; - if (message.stringValue != null && message.hasOwnProperty("stringValue")) - if (!(message.stringValue && typeof message.stringValue.length === "number" || $util.isString(message.stringValue))) - return "stringValue: buffer expected"; - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) - if 
(!$util.isString(message.aggregateValue)) - return "aggregateValue: string expected"; - return null; - }; - - /** - * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UninterpretedOption} UninterpretedOption - */ - UninterpretedOption.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UninterpretedOption) - return object; - var message = new $root.google.protobuf.UninterpretedOption(); - if (object.name) { - if (!Array.isArray(object.name)) - throw TypeError(".google.protobuf.UninterpretedOption.name: array expected"); - message.name = []; - for (var i = 0; i < object.name.length; ++i) { - if (typeof object.name[i] !== "object") - throw TypeError(".google.protobuf.UninterpretedOption.name: object expected"); - message.name[i] = $root.google.protobuf.UninterpretedOption.NamePart.fromObject(object.name[i]); - } - } - if (object.identifierValue != null) - message.identifierValue = String(object.identifierValue); - if (object.positiveIntValue != null) - if ($util.Long) - (message.positiveIntValue = $util.Long.fromValue(object.positiveIntValue)).unsigned = true; - else if (typeof object.positiveIntValue === "string") - message.positiveIntValue = parseInt(object.positiveIntValue, 10); - else if (typeof object.positiveIntValue === "number") - message.positiveIntValue = object.positiveIntValue; - else if (typeof object.positiveIntValue === "object") - message.positiveIntValue = new $util.LongBits(object.positiveIntValue.low >>> 0, object.positiveIntValue.high >>> 0).toNumber(true); - if (object.negativeIntValue != null) - if ($util.Long) - (message.negativeIntValue = $util.Long.fromValue(object.negativeIntValue)).unsigned = false; - else if (typeof object.negativeIntValue === "string") - message.negativeIntValue = 
parseInt(object.negativeIntValue, 10); - else if (typeof object.negativeIntValue === "number") - message.negativeIntValue = object.negativeIntValue; - else if (typeof object.negativeIntValue === "object") - message.negativeIntValue = new $util.LongBits(object.negativeIntValue.low >>> 0, object.negativeIntValue.high >>> 0).toNumber(); - if (object.doubleValue != null) - message.doubleValue = Number(object.doubleValue); - if (object.stringValue != null) - if (typeof object.stringValue === "string") - $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); - else if (object.stringValue.length >= 0) - message.stringValue = object.stringValue; - if (object.aggregateValue != null) - message.aggregateValue = String(object.aggregateValue); - return message; - }; - - /** - * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {google.protobuf.UninterpretedOption} message UninterpretedOption - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - UninterpretedOption.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.name = []; - if (options.defaults) { - object.identifierValue = ""; - if ($util.Long) { - var long = new $util.Long(0, 0, true); - object.positiveIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.positiveIntValue = options.longs === String ? "0" : 0; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.negativeIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.negativeIntValue = options.longs === String ? 
"0" : 0; - object.doubleValue = 0; - if (options.bytes === String) - object.stringValue = ""; - else { - object.stringValue = []; - if (options.bytes !== Array) - object.stringValue = $util.newBuffer(object.stringValue); - } - object.aggregateValue = ""; - } - if (message.name && message.name.length) { - object.name = []; - for (var j = 0; j < message.name.length; ++j) - object.name[j] = $root.google.protobuf.UninterpretedOption.NamePart.toObject(message.name[j], options); - } - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) - object.identifierValue = message.identifierValue; - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) - if (typeof message.positiveIntValue === "number") - object.positiveIntValue = options.longs === String ? String(message.positiveIntValue) : message.positiveIntValue; - else - object.positiveIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.positiveIntValue) : options.longs === Number ? new $util.LongBits(message.positiveIntValue.low >>> 0, message.positiveIntValue.high >>> 0).toNumber(true) : message.positiveIntValue; - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) - if (typeof message.negativeIntValue === "number") - object.negativeIntValue = options.longs === String ? String(message.negativeIntValue) : message.negativeIntValue; - else - object.negativeIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.negativeIntValue) : options.longs === Number ? new $util.LongBits(message.negativeIntValue.low >>> 0, message.negativeIntValue.high >>> 0).toNumber() : message.negativeIntValue; - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) - object.doubleValue = options.json && !isFinite(message.doubleValue) ? 
String(message.doubleValue) : message.doubleValue; - if (message.stringValue != null && message.hasOwnProperty("stringValue")) - object.stringValue = options.bytes === String ? $util.base64.encode(message.stringValue, 0, message.stringValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.stringValue) : message.stringValue; - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) - object.aggregateValue = message.aggregateValue; - return object; - }; - - /** - * Converts this UninterpretedOption to JSON. - * @function toJSON - * @memberof google.protobuf.UninterpretedOption - * @instance - * @returns {Object.} JSON object - */ - UninterpretedOption.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for UninterpretedOption - * @function getTypeUrl - * @memberof google.protobuf.UninterpretedOption - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - UninterpretedOption.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.UninterpretedOption"; - }; - - UninterpretedOption.NamePart = (function() { - - /** - * Properties of a NamePart. - * @memberof google.protobuf.UninterpretedOption - * @interface INamePart - * @property {string} namePart NamePart namePart - * @property {boolean} isExtension NamePart isExtension - */ - - /** - * Constructs a new NamePart. - * @memberof google.protobuf.UninterpretedOption - * @classdesc Represents a NamePart. 
- * @implements INamePart - * @constructor - * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set - */ - function NamePart(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * NamePart namePart. - * @member {string} namePart - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - */ - NamePart.prototype.namePart = ""; - - /** - * NamePart isExtension. - * @member {boolean} isExtension - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - */ - NamePart.prototype.isExtension = false; - - /** - * Creates a new NamePart instance using the specified properties. - * @function create - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart instance - */ - NamePart.create = function create(properties) { - return new NamePart(properties); - }; - - /** - * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @function encode - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - NamePart.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - writer.uint32(/* id 1, wireType 2 =*/10).string(message.namePart); - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.isExtension); - return writer; - }; - - /** - * Encodes the specified NamePart message, length delimited. 
Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - NamePart.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a NamePart message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - NamePart.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.namePart = reader.string(); - break; - } - case 2: { - message.isExtension = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - if (!message.hasOwnProperty("namePart")) - throw $util.ProtocolError("missing required 'namePart'", { instance: message }); - if (!message.hasOwnProperty("isExtension")) - throw $util.ProtocolError("missing required 'isExtension'", { instance: message }); - return message; - }; - - /** - * Decodes a NamePart message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - NamePart.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a NamePart message. - * @function verify - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - NamePart.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (!$util.isString(message.namePart)) - return "namePart: string expected"; - if (typeof message.isExtension !== "boolean") - return "isExtension: boolean expected"; - return null; - }; - - /** - * Creates a NamePart message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - */ - NamePart.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) - return object; - var message = new $root.google.protobuf.UninterpretedOption.NamePart(); - if (object.namePart != null) - message.namePart = String(object.namePart); - if (object.isExtension != null) - message.isExtension = Boolean(object.isExtension); - return message; - }; - - /** - * Creates a plain object from a NamePart message. 
Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - NamePart.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.namePart = ""; - object.isExtension = false; - } - if (message.namePart != null && message.hasOwnProperty("namePart")) - object.namePart = message.namePart; - if (message.isExtension != null && message.hasOwnProperty("isExtension")) - object.isExtension = message.isExtension; - return object; - }; - - /** - * Converts this NamePart to JSON. - * @function toJSON - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - * @returns {Object.} JSON object - */ - NamePart.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for NamePart - * @function getTypeUrl - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - NamePart.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.UninterpretedOption.NamePart"; - }; - - return NamePart; - })(); - - return UninterpretedOption; - })(); - - protobuf.SourceCodeInfo = (function() { - - /** - * Properties of a SourceCodeInfo. - * @memberof google.protobuf - * @interface ISourceCodeInfo - * @property {Array.|null} [location] SourceCodeInfo location - */ - - /** - * Constructs a new SourceCodeInfo. - * @memberof google.protobuf - * @classdesc Represents a SourceCodeInfo. 
- * @implements ISourceCodeInfo - * @constructor - * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set - */ - function SourceCodeInfo(properties) { - this.location = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SourceCodeInfo location. - * @member {Array.} location - * @memberof google.protobuf.SourceCodeInfo - * @instance - */ - SourceCodeInfo.prototype.location = $util.emptyArray; - - /** - * Creates a new SourceCodeInfo instance using the specified properties. - * @function create - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo instance - */ - SourceCodeInfo.create = function create(properties) { - return new SourceCodeInfo(properties); - }; - - /** - * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. - * @function encode - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SourceCodeInfo.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.location != null && message.location.length) - for (var i = 0; i < message.location.length; ++i) - $root.google.protobuf.SourceCodeInfo.Location.encode(message.location[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SourceCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SourceCodeInfo message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SourceCodeInfo.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.location && message.location.length)) - message.location = []; - message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SourceCodeInfo.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SourceCodeInfo message. - * @function verify - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SourceCodeInfo.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.location != null && message.hasOwnProperty("location")) { - if (!Array.isArray(message.location)) - return "location: array expected"; - for (var i = 0; i < message.location.length; ++i) { - var error = $root.google.protobuf.SourceCodeInfo.Location.verify(message.location[i]); - if (error) - return "location." + error; - } - } - return null; - }; - - /** - * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo - */ - SourceCodeInfo.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.SourceCodeInfo) - return object; - var message = new $root.google.protobuf.SourceCodeInfo(); - if (object.location) { - if (!Array.isArray(object.location)) - throw TypeError(".google.protobuf.SourceCodeInfo.location: array expected"); - message.location = []; - for (var i = 0; i < object.location.length; ++i) { - if (typeof object.location[i] !== "object") - throw TypeError(".google.protobuf.SourceCodeInfo.location: object expected"); - message.location[i] = $root.google.protobuf.SourceCodeInfo.Location.fromObject(object.location[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {google.protobuf.SourceCodeInfo} message SourceCodeInfo - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SourceCodeInfo.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.location = []; - if (message.location && message.location.length) { - object.location = []; - for (var j = 0; j < message.location.length; ++j) - object.location[j] = $root.google.protobuf.SourceCodeInfo.Location.toObject(message.location[j], options); - } - return object; - }; - - /** - * Converts this SourceCodeInfo to JSON. 
- * @function toJSON - * @memberof google.protobuf.SourceCodeInfo - * @instance - * @returns {Object.} JSON object - */ - SourceCodeInfo.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SourceCodeInfo - * @function getTypeUrl - * @memberof google.protobuf.SourceCodeInfo - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SourceCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.SourceCodeInfo"; - }; - - SourceCodeInfo.Location = (function() { - - /** - * Properties of a Location. - * @memberof google.protobuf.SourceCodeInfo - * @interface ILocation - * @property {Array.|null} [path] Location path - * @property {Array.|null} [span] Location span - * @property {string|null} [leadingComments] Location leadingComments - * @property {string|null} [trailingComments] Location trailingComments - * @property {Array.|null} [leadingDetachedComments] Location leadingDetachedComments - */ - - /** - * Constructs a new Location. - * @memberof google.protobuf.SourceCodeInfo - * @classdesc Represents a Location. - * @implements ILocation - * @constructor - * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set - */ - function Location(properties) { - this.path = []; - this.span = []; - this.leadingDetachedComments = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Location path. - * @member {Array.} path - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.path = $util.emptyArray; - - /** - * Location span. 
- * @member {Array.} span - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.span = $util.emptyArray; - - /** - * Location leadingComments. - * @member {string} leadingComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.leadingComments = ""; - - /** - * Location trailingComments. - * @member {string} trailingComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.trailingComments = ""; - - /** - * Location leadingDetachedComments. - * @member {Array.} leadingDetachedComments - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - */ - Location.prototype.leadingDetachedComments = $util.emptyArray; - - /** - * Creates a new Location instance using the specified properties. - * @function create - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set - * @returns {google.protobuf.SourceCodeInfo.Location} Location instance - */ - Location.create = function create(properties) { - return new Location(properties); - }; - - /** - * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Location.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.path != null && message.path.length) { - writer.uint32(/* id 1, wireType 2 =*/10).fork(); - for (var i = 0; i < message.path.length; ++i) - writer.int32(message.path[i]); - writer.ldelim(); - } - if (message.span != null && message.span.length) { - writer.uint32(/* id 2, wireType 2 =*/18).fork(); - for (var i = 0; i < message.span.length; ++i) - writer.int32(message.span[i]); - writer.ldelim(); - } - if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); - if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) - writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); - if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) - for (var i = 0; i < message.leadingDetachedComments.length; ++i) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.leadingDetachedComments[i]); - return writer; - }; - - /** - * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Location.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Location message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.SourceCodeInfo.Location} Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Location.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - } - case 2: { - if (!(message.span && message.span.length)) - message.span = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.span.push(reader.int32()); - } else - message.span.push(reader.int32()); - break; - } - case 3: { - message.leadingComments = reader.string(); - break; - } - case 4: { - message.trailingComments = reader.string(); - break; - } - case 6: { - if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) - message.leadingDetachedComments = []; - message.leadingDetachedComments.push(reader.string()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Location message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.SourceCodeInfo.Location} Location - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Location.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Location message. 
- * @function verify - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Location.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.path != null && message.hasOwnProperty("path")) { - if (!Array.isArray(message.path)) - return "path: array expected"; - for (var i = 0; i < message.path.length; ++i) - if (!$util.isInteger(message.path[i])) - return "path: integer[] expected"; - } - if (message.span != null && message.hasOwnProperty("span")) { - if (!Array.isArray(message.span)) - return "span: array expected"; - for (var i = 0; i < message.span.length; ++i) - if (!$util.isInteger(message.span[i])) - return "span: integer[] expected"; - } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) - if (!$util.isString(message.leadingComments)) - return "leadingComments: string expected"; - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) - if (!$util.isString(message.trailingComments)) - return "trailingComments: string expected"; - if (message.leadingDetachedComments != null && message.hasOwnProperty("leadingDetachedComments")) { - if (!Array.isArray(message.leadingDetachedComments)) - return "leadingDetachedComments: array expected"; - for (var i = 0; i < message.leadingDetachedComments.length; ++i) - if (!$util.isString(message.leadingDetachedComments[i])) - return "leadingDetachedComments: string[] expected"; - } - return null; - }; - - /** - * Creates a Location message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.SourceCodeInfo.Location} Location - */ - Location.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.SourceCodeInfo.Location) - return object; - var message = new $root.google.protobuf.SourceCodeInfo.Location(); - if (object.path) { - if (!Array.isArray(object.path)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.path: array expected"); - message.path = []; - for (var i = 0; i < object.path.length; ++i) - message.path[i] = object.path[i] | 0; - } - if (object.span) { - if (!Array.isArray(object.span)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.span: array expected"); - message.span = []; - for (var i = 0; i < object.span.length; ++i) - message.span[i] = object.span[i] | 0; - } - if (object.leadingComments != null) - message.leadingComments = String(object.leadingComments); - if (object.trailingComments != null) - message.trailingComments = String(object.trailingComments); - if (object.leadingDetachedComments) { - if (!Array.isArray(object.leadingDetachedComments)) - throw TypeError(".google.protobuf.SourceCodeInfo.Location.leadingDetachedComments: array expected"); - message.leadingDetachedComments = []; - for (var i = 0; i < object.leadingDetachedComments.length; ++i) - message.leadingDetachedComments[i] = String(object.leadingDetachedComments[i]); - } - return message; - }; - - /** - * Creates a plain object from a Location message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {google.protobuf.SourceCodeInfo.Location} message Location - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Location.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.path = []; - object.span = []; - object.leadingDetachedComments = []; - } - if (options.defaults) { - object.leadingComments = ""; - object.trailingComments = ""; - } - if (message.path && message.path.length) { - object.path = []; - for (var j = 0; j < message.path.length; ++j) - object.path[j] = message.path[j]; - } - if (message.span && message.span.length) { - object.span = []; - for (var j = 0; j < message.span.length; ++j) - object.span[j] = message.span[j]; - } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) - object.leadingComments = message.leadingComments; - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) - object.trailingComments = message.trailingComments; - if (message.leadingDetachedComments && message.leadingDetachedComments.length) { - object.leadingDetachedComments = []; - for (var j = 0; j < message.leadingDetachedComments.length; ++j) - object.leadingDetachedComments[j] = message.leadingDetachedComments[j]; - } - return object; - }; - - /** - * Converts this Location to JSON. 
- * @function toJSON - * @memberof google.protobuf.SourceCodeInfo.Location - * @instance - * @returns {Object.} JSON object - */ - Location.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Location - * @function getTypeUrl - * @memberof google.protobuf.SourceCodeInfo.Location - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Location.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.SourceCodeInfo.Location"; - }; - - return Location; - })(); - - return SourceCodeInfo; - })(); - - protobuf.GeneratedCodeInfo = (function() { - - /** - * Properties of a GeneratedCodeInfo. - * @memberof google.protobuf - * @interface IGeneratedCodeInfo - * @property {Array.|null} [annotation] GeneratedCodeInfo annotation - */ - - /** - * Constructs a new GeneratedCodeInfo. - * @memberof google.protobuf - * @classdesc Represents a GeneratedCodeInfo. - * @implements IGeneratedCodeInfo - * @constructor - * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set - */ - function GeneratedCodeInfo(properties) { - this.annotation = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * GeneratedCodeInfo annotation. - * @member {Array.} annotation - * @memberof google.protobuf.GeneratedCodeInfo - * @instance - */ - GeneratedCodeInfo.prototype.annotation = $util.emptyArray; - - /** - * Creates a new GeneratedCodeInfo instance using the specified properties. 
- * @function create - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo instance - */ - GeneratedCodeInfo.create = function create(properties) { - return new GeneratedCodeInfo(properties); - }; - - /** - * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @function encode - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - GeneratedCodeInfo.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.annotation != null && message.annotation.length) - for (var i = 0; i < message.annotation.length; ++i) - $root.google.protobuf.GeneratedCodeInfo.Annotation.encode(message.annotation[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - GeneratedCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - GeneratedCodeInfo.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.annotation && message.annotation.length)) - message.annotation = []; - message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - GeneratedCodeInfo.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a GeneratedCodeInfo message. 
- * @function verify - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - GeneratedCodeInfo.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.annotation != null && message.hasOwnProperty("annotation")) { - if (!Array.isArray(message.annotation)) - return "annotation: array expected"; - for (var i = 0; i < message.annotation.length; ++i) { - var error = $root.google.protobuf.GeneratedCodeInfo.Annotation.verify(message.annotation[i]); - if (error) - return "annotation." + error; - } - } - return null; - }; - - /** - * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo - */ - GeneratedCodeInfo.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.GeneratedCodeInfo) - return object; - var message = new $root.google.protobuf.GeneratedCodeInfo(); - if (object.annotation) { - if (!Array.isArray(object.annotation)) - throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: array expected"); - message.annotation = []; - for (var i = 0; i < object.annotation.length; ++i) { - if (typeof object.annotation[i] !== "object") - throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: object expected"); - message.annotation[i] = $root.google.protobuf.GeneratedCodeInfo.Annotation.fromObject(object.annotation[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {google.protobuf.GeneratedCodeInfo} message GeneratedCodeInfo - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - GeneratedCodeInfo.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.annotation = []; - if (message.annotation && message.annotation.length) { - object.annotation = []; - for (var j = 0; j < message.annotation.length; ++j) - object.annotation[j] = $root.google.protobuf.GeneratedCodeInfo.Annotation.toObject(message.annotation[j], options); - } - return object; - }; - - /** - * Converts this GeneratedCodeInfo to JSON. - * @function toJSON - * @memberof google.protobuf.GeneratedCodeInfo - * @instance - * @returns {Object.} JSON object - */ - GeneratedCodeInfo.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for GeneratedCodeInfo - * @function getTypeUrl - * @memberof google.protobuf.GeneratedCodeInfo - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - GeneratedCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo"; - }; - - GeneratedCodeInfo.Annotation = (function() { - - /** - * Properties of an Annotation. 
- * @memberof google.protobuf.GeneratedCodeInfo - * @interface IAnnotation - * @property {Array.|null} [path] Annotation path - * @property {string|null} [sourceFile] Annotation sourceFile - * @property {number|null} [begin] Annotation begin - * @property {number|null} [end] Annotation end - * @property {google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null} [semantic] Annotation semantic - */ - - /** - * Constructs a new Annotation. - * @memberof google.protobuf.GeneratedCodeInfo - * @classdesc Represents an Annotation. - * @implements IAnnotation - * @constructor - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set - */ - function Annotation(properties) { - this.path = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Annotation path. - * @member {Array.} path - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.path = $util.emptyArray; - - /** - * Annotation sourceFile. - * @member {string} sourceFile - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.sourceFile = ""; - - /** - * Annotation begin. - * @member {number} begin - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.begin = 0; - - /** - * Annotation end. - * @member {number} end - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.end = 0; - - /** - * Annotation semantic. - * @member {google.protobuf.GeneratedCodeInfo.Annotation.Semantic} semantic - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - */ - Annotation.prototype.semantic = 0; - - /** - * Creates a new Annotation instance using the specified properties. 
- * @function create - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation instance - */ - Annotation.create = function create(properties) { - return new Annotation(properties); - }; - - /** - * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. - * @function encode - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Annotation.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.path != null && message.path.length) { - writer.uint32(/* id 1, wireType 2 =*/10).fork(); - for (var i = 0; i < message.path.length; ++i) - writer.int32(message.path[i]); - writer.ldelim(); - } - if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); - if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) - writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); - if (message.end != null && Object.hasOwnProperty.call(message, "end")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); - if (message.semantic != null && Object.hasOwnProperty.call(message, "semantic")) - writer.uint32(/* id 5, wireType 0 =*/40).int32(message.semantic); - return writer; - }; - - /** - * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Annotation.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Annotation message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Annotation.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.path && message.path.length)) - message.path = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.path.push(reader.int32()); - } else - message.path.push(reader.int32()); - break; - } - case 2: { - message.sourceFile = reader.string(); - break; - } - case 3: { - message.begin = reader.int32(); - break; - } - case 4: { - message.end = reader.int32(); - break; - } - case 5: { - message.semantic = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an Annotation message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Annotation.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Annotation message. 
- * @function verify - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Annotation.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.path != null && message.hasOwnProperty("path")) { - if (!Array.isArray(message.path)) - return "path: array expected"; - for (var i = 0; i < message.path.length; ++i) - if (!$util.isInteger(message.path[i])) - return "path: integer[] expected"; - } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) - if (!$util.isString(message.sourceFile)) - return "sourceFile: string expected"; - if (message.begin != null && message.hasOwnProperty("begin")) - if (!$util.isInteger(message.begin)) - return "begin: integer expected"; - if (message.end != null && message.hasOwnProperty("end")) - if (!$util.isInteger(message.end)) - return "end: integer expected"; - if (message.semantic != null && message.hasOwnProperty("semantic")) - switch (message.semantic) { - default: - return "semantic: enum value expected"; - case 0: - case 1: - case 2: - break; - } - return null; - }; - - /** - * Creates an Annotation message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation - */ - Annotation.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.GeneratedCodeInfo.Annotation) - return object; - var message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); - if (object.path) { - if (!Array.isArray(object.path)) - throw TypeError(".google.protobuf.GeneratedCodeInfo.Annotation.path: array expected"); - message.path = []; - for (var i = 0; i < object.path.length; ++i) - message.path[i] = object.path[i] | 0; - } - if (object.sourceFile != null) - message.sourceFile = String(object.sourceFile); - if (object.begin != null) - message.begin = object.begin | 0; - if (object.end != null) - message.end = object.end | 0; - switch (object.semantic) { - default: - if (typeof object.semantic === "number") { - message.semantic = object.semantic; - break; - } - break; - case "NONE": - case 0: - message.semantic = 0; - break; - case "SET": - case 1: - message.semantic = 1; - break; - case "ALIAS": - case 2: - message.semantic = 2; - break; - } - return message; - }; - - /** - * Creates a plain object from an Annotation message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {google.protobuf.GeneratedCodeInfo.Annotation} message Annotation - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Annotation.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.path = []; - if (options.defaults) { - object.sourceFile = ""; - object.begin = 0; - object.end = 0; - object.semantic = options.enums === String ? 
"NONE" : 0; - } - if (message.path && message.path.length) { - object.path = []; - for (var j = 0; j < message.path.length; ++j) - object.path[j] = message.path[j]; - } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) - object.sourceFile = message.sourceFile; - if (message.begin != null && message.hasOwnProperty("begin")) - object.begin = message.begin; - if (message.end != null && message.hasOwnProperty("end")) - object.end = message.end; - if (message.semantic != null && message.hasOwnProperty("semantic")) - object.semantic = options.enums === String ? $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] === undefined ? message.semantic : $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] : message.semantic; - return object; - }; - - /** - * Converts this Annotation to JSON. - * @function toJSON - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @instance - * @returns {Object.} JSON object - */ - Annotation.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Annotation - * @function getTypeUrl - * @memberof google.protobuf.GeneratedCodeInfo.Annotation - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Annotation.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo.Annotation"; - }; - - /** - * Semantic enum. 
- * @name google.protobuf.GeneratedCodeInfo.Annotation.Semantic - * @enum {number} - * @property {number} NONE=0 NONE value - * @property {number} SET=1 SET value - * @property {number} ALIAS=2 ALIAS value - */ - Annotation.Semantic = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "NONE"] = 0; - values[valuesById[1] = "SET"] = 1; - values[valuesById[2] = "ALIAS"] = 2; - return values; - })(); - - return Annotation; - })(); - - return GeneratedCodeInfo; - })(); - - protobuf.Timestamp = (function() { - - /** - * Properties of a Timestamp. - * @memberof google.protobuf - * @interface ITimestamp - * @property {number|Long|null} [seconds] Timestamp seconds - * @property {number|null} [nanos] Timestamp nanos - */ - - /** - * Constructs a new Timestamp. - * @memberof google.protobuf - * @classdesc Represents a Timestamp. - * @implements ITimestamp - * @constructor - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - */ - function Timestamp(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Timestamp seconds. - * @member {number|Long} seconds - * @memberof google.protobuf.Timestamp - * @instance - */ - Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Timestamp nanos. - * @member {number} nanos - * @memberof google.protobuf.Timestamp - * @instance - */ - Timestamp.prototype.nanos = 0; - - /** - * Creates a new Timestamp instance using the specified properties. - * @function create - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - * @returns {google.protobuf.Timestamp} Timestamp instance - */ - Timestamp.create = function create(properties) { - return new Timestamp(properties); - }; - - /** - * Encodes the specified Timestamp message. 
Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @function encode - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Timestamp.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); - return writer; - }; - - /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Timestamp.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Timestamp message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.Timestamp - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Timestamp} Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Timestamp.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.seconds = reader.int64(); - break; - } - case 2: { - message.nanos = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.Timestamp - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Timestamp} Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Timestamp.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Timestamp message. - * @function verify - * @memberof google.protobuf.Timestamp - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Timestamp.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) - return "seconds: integer|Long expected"; - if (message.nanos != null && message.hasOwnProperty("nanos")) - if (!$util.isInteger(message.nanos)) - return "nanos: integer expected"; - return null; - }; - - /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.Timestamp - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Timestamp} Timestamp - */ - Timestamp.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Timestamp) - return object; - var message = new $root.google.protobuf.Timestamp(); - if (object.seconds != null) - if ($util.Long) - (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; - else if (typeof object.seconds === "string") - message.seconds = parseInt(object.seconds, 10); - else if (typeof object.seconds === "number") - message.seconds = object.seconds; - else if (typeof object.seconds === "object") - message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); - if (object.nanos != null) - message.nanos = object.nanos | 0; - return message; - }; - - /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.Timestamp} message Timestamp - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Timestamp.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.seconds = options.longs === String ? "0" : 0; - object.nanos = 0; - } - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (typeof message.seconds === "number") - object.seconds = options.longs === String ? String(message.seconds) : message.seconds; - else - object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? 
new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; - if (message.nanos != null && message.hasOwnProperty("nanos")) - object.nanos = message.nanos; - return object; - }; - - /** - * Converts this Timestamp to JSON. - * @function toJSON - * @memberof google.protobuf.Timestamp - * @instance - * @returns {Object.} JSON object - */ - Timestamp.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Timestamp - * @function getTypeUrl - * @memberof google.protobuf.Timestamp - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Timestamp"; - }; - - return Timestamp; - })(); - - protobuf.DoubleValue = (function() { - - /** - * Properties of a DoubleValue. - * @memberof google.protobuf - * @interface IDoubleValue - * @property {number|null} [value] DoubleValue value - */ - - /** - * Constructs a new DoubleValue. - * @memberof google.protobuf - * @classdesc Represents a DoubleValue. - * @implements IDoubleValue - * @constructor - * @param {google.protobuf.IDoubleValue=} [properties] Properties to set - */ - function DoubleValue(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * DoubleValue value. - * @member {number} value - * @memberof google.protobuf.DoubleValue - * @instance - */ - DoubleValue.prototype.value = 0; - - /** - * Creates a new DoubleValue instance using the specified properties. 
- * @function create - * @memberof google.protobuf.DoubleValue - * @static - * @param {google.protobuf.IDoubleValue=} [properties] Properties to set - * @returns {google.protobuf.DoubleValue} DoubleValue instance - */ - DoubleValue.create = function create(properties) { - return new DoubleValue(properties); - }; - - /** - * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. - * @function encode - * @memberof google.protobuf.DoubleValue - * @static - * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - DoubleValue.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); - return writer; - }; - - /** - * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.DoubleValue - * @static - * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - DoubleValue.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a DoubleValue message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.DoubleValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DoubleValue} DoubleValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - DoubleValue.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.double(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a DoubleValue message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.DoubleValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.DoubleValue} DoubleValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - DoubleValue.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a DoubleValue message. 
- * @function verify - * @memberof google.protobuf.DoubleValue - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - DoubleValue.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (typeof message.value !== "number") - return "value: number expected"; - return null; - }; - - /** - * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.DoubleValue - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.DoubleValue} DoubleValue - */ - DoubleValue.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.DoubleValue) - return object; - var message = new $root.google.protobuf.DoubleValue(); - if (object.value != null) - message.value = Number(object.value); - return message; - }; - - /** - * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.DoubleValue - * @static - * @param {google.protobuf.DoubleValue} message DoubleValue - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - DoubleValue.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = 0; - if (message.value != null && message.hasOwnProperty("value")) - object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; - return object; - }; - - /** - * Converts this DoubleValue to JSON. 
- * @function toJSON - * @memberof google.protobuf.DoubleValue - * @instance - * @returns {Object.} JSON object - */ - DoubleValue.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for DoubleValue - * @function getTypeUrl - * @memberof google.protobuf.DoubleValue - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - DoubleValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.DoubleValue"; - }; - - return DoubleValue; - })(); - - protobuf.FloatValue = (function() { - - /** - * Properties of a FloatValue. - * @memberof google.protobuf - * @interface IFloatValue - * @property {number|null} [value] FloatValue value - */ - - /** - * Constructs a new FloatValue. - * @memberof google.protobuf - * @classdesc Represents a FloatValue. - * @implements IFloatValue - * @constructor - * @param {google.protobuf.IFloatValue=} [properties] Properties to set - */ - function FloatValue(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FloatValue value. - * @member {number} value - * @memberof google.protobuf.FloatValue - * @instance - */ - FloatValue.prototype.value = 0; - - /** - * Creates a new FloatValue instance using the specified properties. - * @function create - * @memberof google.protobuf.FloatValue - * @static - * @param {google.protobuf.IFloatValue=} [properties] Properties to set - * @returns {google.protobuf.FloatValue} FloatValue instance - */ - FloatValue.create = function create(properties) { - return new FloatValue(properties); - }; - - /** - * Encodes the specified FloatValue message. 
Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FloatValue - * @static - * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FloatValue.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 5 =*/13).float(message.value); - return writer; - }; - - /** - * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FloatValue - * @static - * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FloatValue.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FloatValue message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.FloatValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FloatValue} FloatValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FloatValue.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FloatValue(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.float(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FloatValue message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FloatValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FloatValue} FloatValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FloatValue.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FloatValue message. - * @function verify - * @memberof google.protobuf.FloatValue - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FloatValue.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (typeof message.value !== "number") - return "value: number expected"; - return null; - }; - - /** - * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.FloatValue - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FloatValue} FloatValue - */ - FloatValue.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FloatValue) - return object; - var message = new $root.google.protobuf.FloatValue(); - if (object.value != null) - message.value = Number(object.value); - return message; - }; - - /** - * Creates a plain object from a FloatValue message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.FloatValue - * @static - * @param {google.protobuf.FloatValue} message FloatValue - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FloatValue.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = 0; - if (message.value != null && message.hasOwnProperty("value")) - object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; - return object; - }; - - /** - * Converts this FloatValue to JSON. 
- * @function toJSON - * @memberof google.protobuf.FloatValue - * @instance - * @returns {Object.} JSON object - */ - FloatValue.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FloatValue - * @function getTypeUrl - * @memberof google.protobuf.FloatValue - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FloatValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FloatValue"; - }; - - return FloatValue; - })(); - - protobuf.Int64Value = (function() { - - /** - * Properties of an Int64Value. - * @memberof google.protobuf - * @interface IInt64Value - * @property {number|Long|null} [value] Int64Value value - */ - - /** - * Constructs a new Int64Value. - * @memberof google.protobuf - * @classdesc Represents an Int64Value. - * @implements IInt64Value - * @constructor - * @param {google.protobuf.IInt64Value=} [properties] Properties to set - */ - function Int64Value(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Int64Value value. - * @member {number|Long} value - * @memberof google.protobuf.Int64Value - * @instance - */ - Int64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Creates a new Int64Value instance using the specified properties. 
- * @function create - * @memberof google.protobuf.Int64Value - * @static - * @param {google.protobuf.IInt64Value=} [properties] Properties to set - * @returns {google.protobuf.Int64Value} Int64Value instance - */ - Int64Value.create = function create(properties) { - return new Int64Value(properties); - }; - - /** - * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. - * @function encode - * @memberof google.protobuf.Int64Value - * @static - * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Int64Value.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.value); - return writer; - }; - - /** - * Encodes the specified Int64Value message, length delimited. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.Int64Value - * @static - * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Int64Value.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Int64Value message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.Int64Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Int64Value} Int64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Int64Value.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Int64Value(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.int64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an Int64Value message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.Int64Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Int64Value} Int64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Int64Value.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Int64Value message. 
- * @function verify - * @memberof google.protobuf.Int64Value - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Int64Value.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) - return "value: integer|Long expected"; - return null; - }; - - /** - * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.Int64Value - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Int64Value} Int64Value - */ - Int64Value.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Int64Value) - return object; - var message = new $root.google.protobuf.Int64Value(); - if (object.value != null) - if ($util.Long) - (message.value = $util.Long.fromValue(object.value)).unsigned = false; - else if (typeof object.value === "string") - message.value = parseInt(object.value, 10); - else if (typeof object.value === "number") - message.value = object.value; - else if (typeof object.value === "object") - message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(); - return message; - }; - - /** - * Creates a plain object from an Int64Value message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.Int64Value - * @static - * @param {google.protobuf.Int64Value} message Int64Value - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Int64Value.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.value = options.longs === String ? "0" : 0; - if (message.value != null && message.hasOwnProperty("value")) - if (typeof message.value === "number") - object.value = options.longs === String ? String(message.value) : message.value; - else - object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber() : message.value; - return object; - }; - - /** - * Converts this Int64Value to JSON. - * @function toJSON - * @memberof google.protobuf.Int64Value - * @instance - * @returns {Object.} JSON object - */ - Int64Value.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Int64Value - * @function getTypeUrl - * @memberof google.protobuf.Int64Value - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Int64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Int64Value"; - }; - - return Int64Value; - })(); - - protobuf.UInt64Value = (function() { - - /** - * Properties of a UInt64Value. 
- * @memberof google.protobuf - * @interface IUInt64Value - * @property {number|Long|null} [value] UInt64Value value - */ - - /** - * Constructs a new UInt64Value. - * @memberof google.protobuf - * @classdesc Represents a UInt64Value. - * @implements IUInt64Value - * @constructor - * @param {google.protobuf.IUInt64Value=} [properties] Properties to set - */ - function UInt64Value(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * UInt64Value value. - * @member {number|Long} value - * @memberof google.protobuf.UInt64Value - * @instance - */ - UInt64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * Creates a new UInt64Value instance using the specified properties. - * @function create - * @memberof google.protobuf.UInt64Value - * @static - * @param {google.protobuf.IUInt64Value=} [properties] Properties to set - * @returns {google.protobuf.UInt64Value} UInt64Value instance - */ - UInt64Value.create = function create(properties) { - return new UInt64Value(properties); - }; - - /** - * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. - * @function encode - * @memberof google.protobuf.UInt64Value - * @static - * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UInt64Value.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 0 =*/8).uint64(message.value); - return writer; - }; - - /** - * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.UInt64Value - * @static - * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UInt64Value.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a UInt64Value message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.UInt64Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UInt64Value} UInt64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UInt64Value.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UInt64Value(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.uint64(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a UInt64Value message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.UInt64Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UInt64Value} UInt64Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UInt64Value.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a UInt64Value message. - * @function verify - * @memberof google.protobuf.UInt64Value - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - UInt64Value.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) - return "value: integer|Long expected"; - return null; - }; - - /** - * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.UInt64Value - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UInt64Value} UInt64Value - */ - UInt64Value.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UInt64Value) - return object; - var message = new $root.google.protobuf.UInt64Value(); - if (object.value != null) - if ($util.Long) - (message.value = $util.Long.fromValue(object.value)).unsigned = true; - else if (typeof object.value === "string") - message.value = parseInt(object.value, 10); - else if (typeof object.value === "number") - message.value = object.value; - else if (typeof object.value === "object") - message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(true); - return message; - }; - - /** - * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UInt64Value - * @static - * @param {google.protobuf.UInt64Value} message UInt64Value - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - UInt64Value.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if ($util.Long) { - var long = new $util.Long(0, 0, true); - object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.value = options.longs === String ? "0" : 0; - if (message.value != null && message.hasOwnProperty("value")) - if (typeof message.value === "number") - object.value = options.longs === String ? String(message.value) : message.value; - else - object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? 
new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber(true) : message.value; - return object; - }; - - /** - * Converts this UInt64Value to JSON. - * @function toJSON - * @memberof google.protobuf.UInt64Value - * @instance - * @returns {Object.} JSON object - */ - UInt64Value.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for UInt64Value - * @function getTypeUrl - * @memberof google.protobuf.UInt64Value - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - UInt64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.UInt64Value"; - }; - - return UInt64Value; - })(); - - protobuf.Int32Value = (function() { - - /** - * Properties of an Int32Value. - * @memberof google.protobuf - * @interface IInt32Value - * @property {number|null} [value] Int32Value value - */ - - /** - * Constructs a new Int32Value. - * @memberof google.protobuf - * @classdesc Represents an Int32Value. - * @implements IInt32Value - * @constructor - * @param {google.protobuf.IInt32Value=} [properties] Properties to set - */ - function Int32Value(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Int32Value value. - * @member {number} value - * @memberof google.protobuf.Int32Value - * @instance - */ - Int32Value.prototype.value = 0; - - /** - * Creates a new Int32Value instance using the specified properties. 
- * @function create - * @memberof google.protobuf.Int32Value - * @static - * @param {google.protobuf.IInt32Value=} [properties] Properties to set - * @returns {google.protobuf.Int32Value} Int32Value instance - */ - Int32Value.create = function create(properties) { - return new Int32Value(properties); - }; - - /** - * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. - * @function encode - * @memberof google.protobuf.Int32Value - * @static - * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Int32Value.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.value); - return writer; - }; - - /** - * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.Int32Value - * @static - * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Int32Value.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Int32Value message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.Int32Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Int32Value} Int32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Int32Value.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Int32Value(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an Int32Value message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.Int32Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Int32Value} Int32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Int32Value.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Int32Value message. 
- * @function verify - * @memberof google.protobuf.Int32Value - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Int32Value.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!$util.isInteger(message.value)) - return "value: integer expected"; - return null; - }; - - /** - * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.Int32Value - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Int32Value} Int32Value - */ - Int32Value.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Int32Value) - return object; - var message = new $root.google.protobuf.Int32Value(); - if (object.value != null) - message.value = object.value | 0; - return message; - }; - - /** - * Creates a plain object from an Int32Value message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.Int32Value - * @static - * @param {google.protobuf.Int32Value} message Int32Value - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Int32Value.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = 0; - if (message.value != null && message.hasOwnProperty("value")) - object.value = message.value; - return object; - }; - - /** - * Converts this Int32Value to JSON. 
- * @function toJSON - * @memberof google.protobuf.Int32Value - * @instance - * @returns {Object.} JSON object - */ - Int32Value.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Int32Value - * @function getTypeUrl - * @memberof google.protobuf.Int32Value - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Int32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Int32Value"; - }; - - return Int32Value; - })(); - - protobuf.UInt32Value = (function() { - - /** - * Properties of a UInt32Value. - * @memberof google.protobuf - * @interface IUInt32Value - * @property {number|null} [value] UInt32Value value - */ - - /** - * Constructs a new UInt32Value. - * @memberof google.protobuf - * @classdesc Represents a UInt32Value. - * @implements IUInt32Value - * @constructor - * @param {google.protobuf.IUInt32Value=} [properties] Properties to set - */ - function UInt32Value(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * UInt32Value value. - * @member {number} value - * @memberof google.protobuf.UInt32Value - * @instance - */ - UInt32Value.prototype.value = 0; - - /** - * Creates a new UInt32Value instance using the specified properties. - * @function create - * @memberof google.protobuf.UInt32Value - * @static - * @param {google.protobuf.IUInt32Value=} [properties] Properties to set - * @returns {google.protobuf.UInt32Value} UInt32Value instance - */ - UInt32Value.create = function create(properties) { - return new UInt32Value(properties); - }; - - /** - * Encodes the specified UInt32Value message. 
Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. - * @function encode - * @memberof google.protobuf.UInt32Value - * @static - * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UInt32Value.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 0 =*/8).uint32(message.value); - return writer; - }; - - /** - * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.UInt32Value - * @static - * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UInt32Value.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a UInt32Value message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.UInt32Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.UInt32Value} UInt32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UInt32Value.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UInt32Value(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.uint32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a UInt32Value message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.UInt32Value - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UInt32Value} UInt32Value - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UInt32Value.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a UInt32Value message. - * @function verify - * @memberof google.protobuf.UInt32Value - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - UInt32Value.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!$util.isInteger(message.value)) - return "value: integer expected"; - return null; - }; - - /** - * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.UInt32Value - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UInt32Value} UInt32Value - */ - UInt32Value.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UInt32Value) - return object; - var message = new $root.google.protobuf.UInt32Value(); - if (object.value != null) - message.value = object.value >>> 0; - return message; - }; - - /** - * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UInt32Value - * @static - * @param {google.protobuf.UInt32Value} message UInt32Value - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - UInt32Value.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = 0; - if (message.value != null && message.hasOwnProperty("value")) - object.value = message.value; - return object; - }; - - /** - * Converts this UInt32Value to JSON. - * @function toJSON - * @memberof google.protobuf.UInt32Value - * @instance - * @returns {Object.} JSON object - */ - UInt32Value.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for UInt32Value - * @function getTypeUrl - * @memberof google.protobuf.UInt32Value - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - UInt32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.UInt32Value"; - }; - - return UInt32Value; - })(); - - protobuf.BoolValue = (function() { - - /** - * Properties of a BoolValue. 
- * @memberof google.protobuf - * @interface IBoolValue - * @property {boolean|null} [value] BoolValue value - */ - - /** - * Constructs a new BoolValue. - * @memberof google.protobuf - * @classdesc Represents a BoolValue. - * @implements IBoolValue - * @constructor - * @param {google.protobuf.IBoolValue=} [properties] Properties to set - */ - function BoolValue(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BoolValue value. - * @member {boolean} value - * @memberof google.protobuf.BoolValue - * @instance - */ - BoolValue.prototype.value = false; - - /** - * Creates a new BoolValue instance using the specified properties. - * @function create - * @memberof google.protobuf.BoolValue - * @static - * @param {google.protobuf.IBoolValue=} [properties] Properties to set - * @returns {google.protobuf.BoolValue} BoolValue instance - */ - BoolValue.create = function create(properties) { - return new BoolValue(properties); - }; - - /** - * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. - * @function encode - * @memberof google.protobuf.BoolValue - * @static - * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BoolValue.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.value); - return writer; - }; - - /** - * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.BoolValue - * @static - * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BoolValue.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BoolValue message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.BoolValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.BoolValue} BoolValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BoolValue.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.BoolValue(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BoolValue message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.BoolValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.BoolValue} BoolValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BoolValue.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BoolValue message. - * @function verify - * @memberof google.protobuf.BoolValue - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BoolValue.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (typeof message.value !== "boolean") - return "value: boolean expected"; - return null; - }; - - /** - * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.BoolValue - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.BoolValue} BoolValue - */ - BoolValue.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.BoolValue) - return object; - var message = new $root.google.protobuf.BoolValue(); - if (object.value != null) - message.value = Boolean(object.value); - return message; - }; - - /** - * Creates a plain object from a BoolValue message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.BoolValue - * @static - * @param {google.protobuf.BoolValue} message BoolValue - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BoolValue.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = false; - if (message.value != null && message.hasOwnProperty("value")) - object.value = message.value; - return object; - }; - - /** - * Converts this BoolValue to JSON. - * @function toJSON - * @memberof google.protobuf.BoolValue - * @instance - * @returns {Object.} JSON object - */ - BoolValue.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BoolValue - * @function getTypeUrl - * @memberof google.protobuf.BoolValue - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BoolValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.BoolValue"; - }; - - return BoolValue; - })(); - - protobuf.StringValue = (function() { - - /** - * Properties of a StringValue. - * @memberof google.protobuf - * @interface IStringValue - * @property {string|null} [value] StringValue value - */ - - /** - * Constructs a new StringValue. - * @memberof google.protobuf - * @classdesc Represents a StringValue. - * @implements IStringValue - * @constructor - * @param {google.protobuf.IStringValue=} [properties] Properties to set - */ - function StringValue(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * StringValue value. 
- * @member {string} value - * @memberof google.protobuf.StringValue - * @instance - */ - StringValue.prototype.value = ""; - - /** - * Creates a new StringValue instance using the specified properties. - * @function create - * @memberof google.protobuf.StringValue - * @static - * @param {google.protobuf.IStringValue=} [properties] Properties to set - * @returns {google.protobuf.StringValue} StringValue instance - */ - StringValue.create = function create(properties) { - return new StringValue(properties); - }; - - /** - * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. - * @function encode - * @memberof google.protobuf.StringValue - * @static - * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StringValue.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.value); - return writer; - }; - - /** - * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.StringValue - * @static - * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - StringValue.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a StringValue message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.StringValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.StringValue} StringValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StringValue.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.StringValue(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a StringValue message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.StringValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.StringValue} StringValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - StringValue.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a StringValue message. 
- * @function verify - * @memberof google.protobuf.StringValue - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - StringValue.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!$util.isString(message.value)) - return "value: string expected"; - return null; - }; - - /** - * Creates a StringValue message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.StringValue - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.StringValue} StringValue - */ - StringValue.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.StringValue) - return object; - var message = new $root.google.protobuf.StringValue(); - if (object.value != null) - message.value = String(object.value); - return message; - }; - - /** - * Creates a plain object from a StringValue message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.StringValue - * @static - * @param {google.protobuf.StringValue} message StringValue - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - StringValue.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - object.value = ""; - if (message.value != null && message.hasOwnProperty("value")) - object.value = message.value; - return object; - }; - - /** - * Converts this StringValue to JSON. 
- * @function toJSON - * @memberof google.protobuf.StringValue - * @instance - * @returns {Object.} JSON object - */ - StringValue.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for StringValue - * @function getTypeUrl - * @memberof google.protobuf.StringValue - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - StringValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.StringValue"; - }; - - return StringValue; - })(); - - protobuf.BytesValue = (function() { - - /** - * Properties of a BytesValue. - * @memberof google.protobuf - * @interface IBytesValue - * @property {Uint8Array|null} [value] BytesValue value - */ - - /** - * Constructs a new BytesValue. - * @memberof google.protobuf - * @classdesc Represents a BytesValue. - * @implements IBytesValue - * @constructor - * @param {google.protobuf.IBytesValue=} [properties] Properties to set - */ - function BytesValue(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * BytesValue value. - * @member {Uint8Array} value - * @memberof google.protobuf.BytesValue - * @instance - */ - BytesValue.prototype.value = $util.newBuffer([]); - - /** - * Creates a new BytesValue instance using the specified properties. 
- * @function create - * @memberof google.protobuf.BytesValue - * @static - * @param {google.protobuf.IBytesValue=} [properties] Properties to set - * @returns {google.protobuf.BytesValue} BytesValue instance - */ - BytesValue.create = function create(properties) { - return new BytesValue(properties); - }; - - /** - * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. - * @function encode - * @memberof google.protobuf.BytesValue - * @static - * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BytesValue.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.value); - return writer; - }; - - /** - * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.BytesValue - * @static - * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - BytesValue.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a BytesValue message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.BytesValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.BytesValue} BytesValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BytesValue.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.BytesValue(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.value = reader.bytes(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a BytesValue message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.BytesValue - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.BytesValue} BytesValue - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - BytesValue.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a BytesValue message. 
- * @function verify - * @memberof google.protobuf.BytesValue - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - BytesValue.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) - return "value: buffer expected"; - return null; - }; - - /** - * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.BytesValue - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.BytesValue} BytesValue - */ - BytesValue.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.BytesValue) - return object; - var message = new $root.google.protobuf.BytesValue(); - if (object.value != null) - if (typeof object.value === "string") - $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); - else if (object.value.length >= 0) - message.value = object.value; - return message; - }; - - /** - * Creates a plain object from a BytesValue message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.BytesValue - * @static - * @param {google.protobuf.BytesValue} message BytesValue - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - BytesValue.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) - if (options.bytes === String) - object.value = ""; - else { - object.value = []; - if (options.bytes !== Array) - object.value = $util.newBuffer(object.value); - } - if (message.value != null && message.hasOwnProperty("value")) - object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; - return object; - }; - - /** - * Converts this BytesValue to JSON. - * @function toJSON - * @memberof google.protobuf.BytesValue - * @instance - * @returns {Object.} JSON object - */ - BytesValue.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for BytesValue - * @function getTypeUrl - * @memberof google.protobuf.BytesValue - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - BytesValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.BytesValue"; - }; - - return BytesValue; - })(); - - protobuf.Any = (function() { - - /** - * Properties of an Any. - * @memberof google.protobuf - * @interface IAny - * @property {string|null} [type_url] Any type_url - * @property {Uint8Array|null} [value] Any value - */ - - /** - * Constructs a new Any. - * @memberof google.protobuf - * @classdesc Represents an Any. 
- * @implements IAny - * @constructor - * @param {google.protobuf.IAny=} [properties] Properties to set - */ - function Any(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Any type_url. - * @member {string} type_url - * @memberof google.protobuf.Any - * @instance - */ - Any.prototype.type_url = ""; - - /** - * Any value. - * @member {Uint8Array} value - * @memberof google.protobuf.Any - * @instance - */ - Any.prototype.value = $util.newBuffer([]); - - /** - * Creates a new Any instance using the specified properties. - * @function create - * @memberof google.protobuf.Any - * @static - * @param {google.protobuf.IAny=} [properties] Properties to set - * @returns {google.protobuf.Any} Any instance - */ - Any.create = function create(properties) { - return new Any(properties); - }; - - /** - * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. - * @function encode - * @memberof google.protobuf.Any - * @static - * @param {google.protobuf.IAny} message Any message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Any.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.type_url != null && Object.hasOwnProperty.call(message, "type_url")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.type_url); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); - return writer; - }; - - /** - * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.protobuf.Any - * @static - * @param {google.protobuf.IAny} message Any message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Any.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Any message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.Any - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Any} Any - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Any.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.type_url = reader.string(); - break; - } - case 2: { - message.value = reader.bytes(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an Any message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.Any - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Any} Any - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Any.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Any message. - * @function verify - * @memberof google.protobuf.Any - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Any.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.type_url != null && message.hasOwnProperty("type_url")) - if (!$util.isString(message.type_url)) - return "type_url: string expected"; - if (message.value != null && message.hasOwnProperty("value")) - if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) - return "value: buffer expected"; - return null; - }; - - /** - * Creates an Any message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.protobuf.Any - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Any} Any - */ - Any.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Any) - return object; - var message = new $root.google.protobuf.Any(); - if (object.type_url != null) - message.type_url = String(object.type_url); - if (object.value != null) - if (typeof object.value === "string") - $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); - else if (object.value.length >= 0) - message.value = object.value; - return message; - }; - - /** - * Creates a plain object from an Any message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.Any - * @static - * @param {google.protobuf.Any} message Any - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Any.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.type_url = ""; - if (options.bytes === String) - object.value = ""; - else { - object.value = []; - if (options.bytes !== Array) - object.value = $util.newBuffer(object.value); - } - } - if (message.type_url != null && message.hasOwnProperty("type_url")) - object.type_url = message.type_url; - if (message.value != null && message.hasOwnProperty("value")) - object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; - return object; - }; - - /** - * Converts this Any to JSON. 
- * @function toJSON - * @memberof google.protobuf.Any - * @instance - * @returns {Object.} JSON object - */ - Any.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Any - * @function getTypeUrl - * @memberof google.protobuf.Any - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Any.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Any"; - }; - - return Any; - })(); - - protobuf.Empty = (function() { - - /** - * Properties of an Empty. - * @memberof google.protobuf - * @interface IEmpty - */ - - /** - * Constructs a new Empty. - * @memberof google.protobuf - * @classdesc Represents an Empty. - * @implements IEmpty - * @constructor - * @param {google.protobuf.IEmpty=} [properties] Properties to set - */ - function Empty(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Creates a new Empty instance using the specified properties. - * @function create - * @memberof google.protobuf.Empty - * @static - * @param {google.protobuf.IEmpty=} [properties] Properties to set - * @returns {google.protobuf.Empty} Empty instance - */ - Empty.create = function create(properties) { - return new Empty(properties); - }; - - /** - * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.Empty - * @static - * @param {google.protobuf.IEmpty} message Empty message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Empty.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - return writer; - }; - - /** - * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.Empty - * @static - * @param {google.protobuf.IEmpty} message Empty message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Empty.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an Empty message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.Empty - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Empty} Empty - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Empty.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an Empty message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.Empty - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Empty} Empty - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Empty.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an Empty message. - * @function verify - * @memberof google.protobuf.Empty - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Empty.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - return null; - }; - - /** - * Creates an Empty message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.Empty - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Empty} Empty - */ - Empty.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Empty) - return object; - return new $root.google.protobuf.Empty(); - }; - - /** - * Creates a plain object from an Empty message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.Empty - * @static - * @param {google.protobuf.Empty} message Empty - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Empty.toObject = function toObject() { - return {}; - }; - - /** - * Converts this Empty to JSON. 
- * @function toJSON - * @memberof google.protobuf.Empty - * @instance - * @returns {Object.} JSON object - */ - Empty.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Empty - * @function getTypeUrl - * @memberof google.protobuf.Empty - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Empty.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Empty"; - }; - - return Empty; - })(); - - return protobuf; - })(); - - google.api = (function() { - - /** - * Namespace api. - * @memberof google - * @namespace - */ - var api = {}; - - api.Http = (function() { - - /** - * Properties of a Http. - * @memberof google.api - * @interface IHttp - * @property {Array.|null} [rules] Http rules - * @property {boolean|null} [fullyDecodeReservedExpansion] Http fullyDecodeReservedExpansion - */ - - /** - * Constructs a new Http. - * @memberof google.api - * @classdesc Represents a Http. - * @implements IHttp - * @constructor - * @param {google.api.IHttp=} [properties] Properties to set - */ - function Http(properties) { - this.rules = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Http rules. - * @member {Array.} rules - * @memberof google.api.Http - * @instance - */ - Http.prototype.rules = $util.emptyArray; - - /** - * Http fullyDecodeReservedExpansion. - * @member {boolean} fullyDecodeReservedExpansion - * @memberof google.api.Http - * @instance - */ - Http.prototype.fullyDecodeReservedExpansion = false; - - /** - * Creates a new Http instance using the specified properties. 
- * @function create - * @memberof google.api.Http - * @static - * @param {google.api.IHttp=} [properties] Properties to set - * @returns {google.api.Http} Http instance - */ - Http.create = function create(properties) { - return new Http(properties); - }; - - /** - * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @function encode - * @memberof google.api.Http - * @static - * @param {google.api.IHttp} message Http message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Http.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.rules != null && message.rules.length) - for (var i = 0; i < message.rules.length; ++i) - $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); - return writer; - }; - - /** - * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.Http - * @static - * @param {google.api.IHttp} message Http message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Http.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Http message from the specified reader or buffer. 
- * @function decode - * @memberof google.api.Http - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.Http} Http - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Http.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Http(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (!(message.rules && message.rules.length)) - message.rules = []; - message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); - break; - } - case 2: { - message.fullyDecodeReservedExpansion = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Http message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.Http - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.Http} Http - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Http.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Http message. 
- * @function verify - * @memberof google.api.Http - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Http.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.rules != null && message.hasOwnProperty("rules")) { - if (!Array.isArray(message.rules)) - return "rules: array expected"; - for (var i = 0; i < message.rules.length; ++i) { - var error = $root.google.api.HttpRule.verify(message.rules[i]); - if (error) - return "rules." + error; - } - } - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) - if (typeof message.fullyDecodeReservedExpansion !== "boolean") - return "fullyDecodeReservedExpansion: boolean expected"; - return null; - }; - - /** - * Creates a Http message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.api.Http - * @static - * @param {Object.} object Plain object - * @returns {google.api.Http} Http - */ - Http.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.Http) - return object; - var message = new $root.google.api.Http(); - if (object.rules) { - if (!Array.isArray(object.rules)) - throw TypeError(".google.api.Http.rules: array expected"); - message.rules = []; - for (var i = 0; i < object.rules.length; ++i) { - if (typeof object.rules[i] !== "object") - throw TypeError(".google.api.Http.rules: object expected"); - message.rules[i] = $root.google.api.HttpRule.fromObject(object.rules[i]); - } - } - if (object.fullyDecodeReservedExpansion != null) - message.fullyDecodeReservedExpansion = Boolean(object.fullyDecodeReservedExpansion); - return message; - }; - - /** - * Creates a plain object from a Http message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.api.Http - * @static - * @param {google.api.Http} message Http - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Http.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.rules = []; - if (options.defaults) - object.fullyDecodeReservedExpansion = false; - if (message.rules && message.rules.length) { - object.rules = []; - for (var j = 0; j < message.rules.length; ++j) - object.rules[j] = $root.google.api.HttpRule.toObject(message.rules[j], options); - } - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) - object.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; - return object; - }; - - /** - * Converts this Http to JSON. - * @function toJSON - * @memberof google.api.Http - * @instance - * @returns {Object.} JSON object - */ - Http.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Http - * @function getTypeUrl - * @memberof google.api.Http - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Http.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.Http"; - }; - - return Http; - })(); - - api.HttpRule = (function() { - - /** - * Properties of a HttpRule. 
- * @memberof google.api - * @interface IHttpRule - * @property {string|null} [selector] HttpRule selector - * @property {string|null} [get] HttpRule get - * @property {string|null} [put] HttpRule put - * @property {string|null} [post] HttpRule post - * @property {string|null} ["delete"] HttpRule delete - * @property {string|null} [patch] HttpRule patch - * @property {google.api.ICustomHttpPattern|null} [custom] HttpRule custom - * @property {string|null} [body] HttpRule body - * @property {string|null} [responseBody] HttpRule responseBody - * @property {Array.|null} [additionalBindings] HttpRule additionalBindings - */ - - /** - * Constructs a new HttpRule. - * @memberof google.api - * @classdesc Represents a HttpRule. - * @implements IHttpRule - * @constructor - * @param {google.api.IHttpRule=} [properties] Properties to set - */ - function HttpRule(properties) { - this.additionalBindings = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * HttpRule selector. - * @member {string} selector - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.selector = ""; - - /** - * HttpRule get. - * @member {string|null|undefined} get - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.get = null; - - /** - * HttpRule put. - * @member {string|null|undefined} put - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.put = null; - - /** - * HttpRule post. - * @member {string|null|undefined} post - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.post = null; - - /** - * HttpRule delete. - * @member {string|null|undefined} delete - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype["delete"] = null; - - /** - * HttpRule patch. 
- * @member {string|null|undefined} patch - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.patch = null; - - /** - * HttpRule custom. - * @member {google.api.ICustomHttpPattern|null|undefined} custom - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.custom = null; - - /** - * HttpRule body. - * @member {string} body - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.body = ""; - - /** - * HttpRule responseBody. - * @member {string} responseBody - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.responseBody = ""; - - /** - * HttpRule additionalBindings. - * @member {Array.} additionalBindings - * @memberof google.api.HttpRule - * @instance - */ - HttpRule.prototype.additionalBindings = $util.emptyArray; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - /** - * HttpRule pattern. - * @member {"get"|"put"|"post"|"delete"|"patch"|"custom"|undefined} pattern - * @memberof google.api.HttpRule - * @instance - */ - Object.defineProperty(HttpRule.prototype, "pattern", { - get: $util.oneOfGetter($oneOfFields = ["get", "put", "post", "delete", "patch", "custom"]), - set: $util.oneOfSetter($oneOfFields) - }); - - /** - * Creates a new HttpRule instance using the specified properties. - * @function create - * @memberof google.api.HttpRule - * @static - * @param {google.api.IHttpRule=} [properties] Properties to set - * @returns {google.api.HttpRule} HttpRule instance - */ - HttpRule.create = function create(properties) { - return new HttpRule(properties); - }; - - /** - * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. 
- * @function encode - * @memberof google.api.HttpRule - * @static - * @param {google.api.IHttpRule} message HttpRule message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - HttpRule.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); - if (message.get != null && Object.hasOwnProperty.call(message, "get")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); - if (message.put != null && Object.hasOwnProperty.call(message, "put")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); - if (message.post != null && Object.hasOwnProperty.call(message, "post")) - writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); - if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); - if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); - if (message.body != null && Object.hasOwnProperty.call(message, "body")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); - if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) - $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.additionalBindings != null && message.additionalBindings.length) - for (var i = 0; i < message.additionalBindings.length; ++i) - $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); - if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) - writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); - return 
writer; - }; - - /** - * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.HttpRule - * @static - * @param {google.api.IHttpRule} message HttpRule message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - HttpRule.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a HttpRule message from the specified reader or buffer. - * @function decode - * @memberof google.api.HttpRule - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.HttpRule} HttpRule - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - HttpRule.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.selector = reader.string(); - break; - } - case 2: { - message.get = reader.string(); - break; - } - case 3: { - message.put = reader.string(); - break; - } - case 4: { - message.post = reader.string(); - break; - } - case 5: { - message["delete"] = reader.string(); - break; - } - case 6: { - message.patch = reader.string(); - break; - } - case 8: { - message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); - break; - } - case 7: { - message.body = reader.string(); - break; - } - case 12: { - message.responseBody = reader.string(); - break; - } - case 11: { - if (!(message.additionalBindings && message.additionalBindings.length)) - message.additionalBindings = []; - message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a HttpRule message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.HttpRule - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.HttpRule} HttpRule - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - HttpRule.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a HttpRule message. 
- * @function verify - * @memberof google.api.HttpRule - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - HttpRule.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - var properties = {}; - if (message.selector != null && message.hasOwnProperty("selector")) - if (!$util.isString(message.selector)) - return "selector: string expected"; - if (message.get != null && message.hasOwnProperty("get")) { - properties.pattern = 1; - if (!$util.isString(message.get)) - return "get: string expected"; - } - if (message.put != null && message.hasOwnProperty("put")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.put)) - return "put: string expected"; - } - if (message.post != null && message.hasOwnProperty("post")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.post)) - return "post: string expected"; - } - if (message["delete"] != null && message.hasOwnProperty("delete")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message["delete"])) - return "delete: string expected"; - } - if (message.patch != null && message.hasOwnProperty("patch")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - if (!$util.isString(message.patch)) - return "patch: string expected"; - } - if (message.custom != null && message.hasOwnProperty("custom")) { - if (properties.pattern === 1) - return "pattern: multiple values"; - properties.pattern = 1; - { - var error = $root.google.api.CustomHttpPattern.verify(message.custom); - if (error) - return "custom." 
+ error; - } - } - if (message.body != null && message.hasOwnProperty("body")) - if (!$util.isString(message.body)) - return "body: string expected"; - if (message.responseBody != null && message.hasOwnProperty("responseBody")) - if (!$util.isString(message.responseBody)) - return "responseBody: string expected"; - if (message.additionalBindings != null && message.hasOwnProperty("additionalBindings")) { - if (!Array.isArray(message.additionalBindings)) - return "additionalBindings: array expected"; - for (var i = 0; i < message.additionalBindings.length; ++i) { - var error = $root.google.api.HttpRule.verify(message.additionalBindings[i]); - if (error) - return "additionalBindings." + error; - } - } - return null; - }; - - /** - * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.api.HttpRule - * @static - * @param {Object.} object Plain object - * @returns {google.api.HttpRule} HttpRule - */ - HttpRule.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.HttpRule) - return object; - var message = new $root.google.api.HttpRule(); - if (object.selector != null) - message.selector = String(object.selector); - if (object.get != null) - message.get = String(object.get); - if (object.put != null) - message.put = String(object.put); - if (object.post != null) - message.post = String(object.post); - if (object["delete"] != null) - message["delete"] = String(object["delete"]); - if (object.patch != null) - message.patch = String(object.patch); - if (object.custom != null) { - if (typeof object.custom !== "object") - throw TypeError(".google.api.HttpRule.custom: object expected"); - message.custom = $root.google.api.CustomHttpPattern.fromObject(object.custom); - } - if (object.body != null) - message.body = String(object.body); - if (object.responseBody != null) - message.responseBody = String(object.responseBody); - if 
(object.additionalBindings) { - if (!Array.isArray(object.additionalBindings)) - throw TypeError(".google.api.HttpRule.additionalBindings: array expected"); - message.additionalBindings = []; - for (var i = 0; i < object.additionalBindings.length; ++i) { - if (typeof object.additionalBindings[i] !== "object") - throw TypeError(".google.api.HttpRule.additionalBindings: object expected"); - message.additionalBindings[i] = $root.google.api.HttpRule.fromObject(object.additionalBindings[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a HttpRule message. Also converts values to other types if specified. - * @function toObject - * @memberof google.api.HttpRule - * @static - * @param {google.api.HttpRule} message HttpRule - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - HttpRule.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.additionalBindings = []; - if (options.defaults) { - object.selector = ""; - object.body = ""; - object.responseBody = ""; - } - if (message.selector != null && message.hasOwnProperty("selector")) - object.selector = message.selector; - if (message.get != null && message.hasOwnProperty("get")) { - object.get = message.get; - if (options.oneofs) - object.pattern = "get"; - } - if (message.put != null && message.hasOwnProperty("put")) { - object.put = message.put; - if (options.oneofs) - object.pattern = "put"; - } - if (message.post != null && message.hasOwnProperty("post")) { - object.post = message.post; - if (options.oneofs) - object.pattern = "post"; - } - if (message["delete"] != null && message.hasOwnProperty("delete")) { - object["delete"] = message["delete"]; - if (options.oneofs) - object.pattern = "delete"; - } - if (message.patch != null && message.hasOwnProperty("patch")) { - object.patch = message.patch; - if (options.oneofs) - object.pattern = 
"patch"; - } - if (message.body != null && message.hasOwnProperty("body")) - object.body = message.body; - if (message.custom != null && message.hasOwnProperty("custom")) { - object.custom = $root.google.api.CustomHttpPattern.toObject(message.custom, options); - if (options.oneofs) - object.pattern = "custom"; - } - if (message.additionalBindings && message.additionalBindings.length) { - object.additionalBindings = []; - for (var j = 0; j < message.additionalBindings.length; ++j) - object.additionalBindings[j] = $root.google.api.HttpRule.toObject(message.additionalBindings[j], options); - } - if (message.responseBody != null && message.hasOwnProperty("responseBody")) - object.responseBody = message.responseBody; - return object; - }; - - /** - * Converts this HttpRule to JSON. - * @function toJSON - * @memberof google.api.HttpRule - * @instance - * @returns {Object.} JSON object - */ - HttpRule.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for HttpRule - * @function getTypeUrl - * @memberof google.api.HttpRule - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - HttpRule.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.HttpRule"; - }; - - return HttpRule; - })(); - - api.CustomHttpPattern = (function() { - - /** - * Properties of a CustomHttpPattern. - * @memberof google.api - * @interface ICustomHttpPattern - * @property {string|null} [kind] CustomHttpPattern kind - * @property {string|null} [path] CustomHttpPattern path - */ - - /** - * Constructs a new CustomHttpPattern. - * @memberof google.api - * @classdesc Represents a CustomHttpPattern. 
- * @implements ICustomHttpPattern - * @constructor - * @param {google.api.ICustomHttpPattern=} [properties] Properties to set - */ - function CustomHttpPattern(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * CustomHttpPattern kind. - * @member {string} kind - * @memberof google.api.CustomHttpPattern - * @instance - */ - CustomHttpPattern.prototype.kind = ""; - - /** - * CustomHttpPattern path. - * @member {string} path - * @memberof google.api.CustomHttpPattern - * @instance - */ - CustomHttpPattern.prototype.path = ""; - - /** - * Creates a new CustomHttpPattern instance using the specified properties. - * @function create - * @memberof google.api.CustomHttpPattern - * @static - * @param {google.api.ICustomHttpPattern=} [properties] Properties to set - * @returns {google.api.CustomHttpPattern} CustomHttpPattern instance - */ - CustomHttpPattern.create = function create(properties) { - return new CustomHttpPattern(properties); - }; - - /** - * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. 
- * @function encode - * @memberof google.api.CustomHttpPattern - * @static - * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CustomHttpPattern.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); - if (message.path != null && Object.hasOwnProperty.call(message, "path")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); - return writer; - }; - - /** - * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.CustomHttpPattern - * @static - * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - CustomHttpPattern.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a CustomHttpPattern message from the specified reader or buffer. - * @function decode - * @memberof google.api.CustomHttpPattern - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.CustomHttpPattern} CustomHttpPattern - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CustomHttpPattern.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.kind = reader.string(); - break; - } - case 2: { - message.path = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.CustomHttpPattern - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.CustomHttpPattern} CustomHttpPattern - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - CustomHttpPattern.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a CustomHttpPattern message. - * @function verify - * @memberof google.api.CustomHttpPattern - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - CustomHttpPattern.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.kind != null && message.hasOwnProperty("kind")) - if (!$util.isString(message.kind)) - return "kind: string expected"; - if (message.path != null && message.hasOwnProperty("path")) - if (!$util.isString(message.path)) - return "path: string expected"; - return null; - }; - - /** - * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.api.CustomHttpPattern - * @static - * @param {Object.} object Plain object - * @returns {google.api.CustomHttpPattern} CustomHttpPattern - */ - CustomHttpPattern.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.CustomHttpPattern) - return object; - var message = new $root.google.api.CustomHttpPattern(); - if (object.kind != null) - message.kind = String(object.kind); - if (object.path != null) - message.path = String(object.path); - return message; - }; - - /** - * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. - * @function toObject - * @memberof google.api.CustomHttpPattern - * @static - * @param {google.api.CustomHttpPattern} message CustomHttpPattern - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - CustomHttpPattern.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.kind = ""; - object.path = ""; - } - if (message.kind != null && message.hasOwnProperty("kind")) - object.kind = message.kind; - if (message.path != null && message.hasOwnProperty("path")) - object.path = message.path; - return object; - }; - - /** - * Converts this CustomHttpPattern to JSON. 
- * @function toJSON - * @memberof google.api.CustomHttpPattern - * @instance - * @returns {Object.} JSON object - */ - CustomHttpPattern.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for CustomHttpPattern - * @function getTypeUrl - * @memberof google.api.CustomHttpPattern - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - CustomHttpPattern.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.CustomHttpPattern"; - }; - - return CustomHttpPattern; - })(); - - /** - * FieldBehavior enum. - * @name google.api.FieldBehavior - * @enum {number} - * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value - * @property {number} OPTIONAL=1 OPTIONAL value - * @property {number} REQUIRED=2 REQUIRED value - * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value - * @property {number} INPUT_ONLY=4 INPUT_ONLY value - * @property {number} IMMUTABLE=5 IMMUTABLE value - * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value - * @property {number} NON_EMPTY_DEFAULT=7 NON_EMPTY_DEFAULT value - */ - api.FieldBehavior = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "FIELD_BEHAVIOR_UNSPECIFIED"] = 0; - values[valuesById[1] = "OPTIONAL"] = 1; - values[valuesById[2] = "REQUIRED"] = 2; - values[valuesById[3] = "OUTPUT_ONLY"] = 3; - values[valuesById[4] = "INPUT_ONLY"] = 4; - values[valuesById[5] = "IMMUTABLE"] = 5; - values[valuesById[6] = "UNORDERED_LIST"] = 6; - values[valuesById[7] = "NON_EMPTY_DEFAULT"] = 7; - return values; - })(); - - api.ResourceDescriptor = (function() { - - /** - * Properties of a ResourceDescriptor. 
- * @memberof google.api - * @interface IResourceDescriptor - * @property {string|null} [type] ResourceDescriptor type - * @property {Array.|null} [pattern] ResourceDescriptor pattern - * @property {string|null} [nameField] ResourceDescriptor nameField - * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history - * @property {string|null} [plural] ResourceDescriptor plural - * @property {string|null} [singular] ResourceDescriptor singular - * @property {Array.|null} [style] ResourceDescriptor style - */ - - /** - * Constructs a new ResourceDescriptor. - * @memberof google.api - * @classdesc Represents a ResourceDescriptor. - * @implements IResourceDescriptor - * @constructor - * @param {google.api.IResourceDescriptor=} [properties] Properties to set - */ - function ResourceDescriptor(properties) { - this.pattern = []; - this.style = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ResourceDescriptor type. - * @member {string} type - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.type = ""; - - /** - * ResourceDescriptor pattern. - * @member {Array.} pattern - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.pattern = $util.emptyArray; - - /** - * ResourceDescriptor nameField. - * @member {string} nameField - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.nameField = ""; - - /** - * ResourceDescriptor history. - * @member {google.api.ResourceDescriptor.History} history - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.history = 0; - - /** - * ResourceDescriptor plural. 
- * @member {string} plural - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.plural = ""; - - /** - * ResourceDescriptor singular. - * @member {string} singular - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.singular = ""; - - /** - * ResourceDescriptor style. - * @member {Array.} style - * @memberof google.api.ResourceDescriptor - * @instance - */ - ResourceDescriptor.prototype.style = $util.emptyArray; - - /** - * Creates a new ResourceDescriptor instance using the specified properties. - * @function create - * @memberof google.api.ResourceDescriptor - * @static - * @param {google.api.IResourceDescriptor=} [properties] Properties to set - * @returns {google.api.ResourceDescriptor} ResourceDescriptor instance - */ - ResourceDescriptor.create = function create(properties) { - return new ResourceDescriptor(properties); - }; - - /** - * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. 
- * @function encode - * @memberof google.api.ResourceDescriptor - * @static - * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ResourceDescriptor.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.pattern != null && message.pattern.length) - for (var i = 0; i < message.pattern.length; ++i) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); - if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); - if (message.history != null && Object.hasOwnProperty.call(message, "history")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); - if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); - if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) - writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); - if (message.style != null && message.style.length) { - writer.uint32(/* id 10, wireType 2 =*/82).fork(); - for (var i = 0; i < message.style.length; ++i) - writer.int32(message.style[i]); - writer.ldelim(); - } - return writer; - }; - - /** - * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.api.ResourceDescriptor - * @static - * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ResourceDescriptor.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ResourceDescriptor message from the specified reader or buffer. - * @function decode - * @memberof google.api.ResourceDescriptor - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.ResourceDescriptor} ResourceDescriptor - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ResourceDescriptor.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.type = reader.string(); - break; - } - case 2: { - if (!(message.pattern && message.pattern.length)) - message.pattern = []; - message.pattern.push(reader.string()); - break; - } - case 3: { - message.nameField = reader.string(); - break; - } - case 4: { - message.history = reader.int32(); - break; - } - case 5: { - message.plural = reader.string(); - break; - } - case 6: { - message.singular = reader.string(); - break; - } - case 10: { - if (!(message.style && message.style.length)) - message.style = []; - if ((tag & 7) === 2) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) - message.style.push(reader.int32()); - } else - message.style.push(reader.int32()); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.ResourceDescriptor - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.ResourceDescriptor} ResourceDescriptor - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ResourceDescriptor.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ResourceDescriptor message. 
- * @function verify - * @memberof google.api.ResourceDescriptor - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ResourceDescriptor.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.type != null && message.hasOwnProperty("type")) - if (!$util.isString(message.type)) - return "type: string expected"; - if (message.pattern != null && message.hasOwnProperty("pattern")) { - if (!Array.isArray(message.pattern)) - return "pattern: array expected"; - for (var i = 0; i < message.pattern.length; ++i) - if (!$util.isString(message.pattern[i])) - return "pattern: string[] expected"; - } - if (message.nameField != null && message.hasOwnProperty("nameField")) - if (!$util.isString(message.nameField)) - return "nameField: string expected"; - if (message.history != null && message.hasOwnProperty("history")) - switch (message.history) { - default: - return "history: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.plural != null && message.hasOwnProperty("plural")) - if (!$util.isString(message.plural)) - return "plural: string expected"; - if (message.singular != null && message.hasOwnProperty("singular")) - if (!$util.isString(message.singular)) - return "singular: string expected"; - if (message.style != null && message.hasOwnProperty("style")) { - if (!Array.isArray(message.style)) - return "style: array expected"; - for (var i = 0; i < message.style.length; ++i) - switch (message.style[i]) { - default: - return "style: enum value[] expected"; - case 0: - case 1: - break; - } - } - return null; - }; - - /** - * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.api.ResourceDescriptor - * @static - * @param {Object.} object Plain object - * @returns {google.api.ResourceDescriptor} ResourceDescriptor - */ - ResourceDescriptor.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.ResourceDescriptor) - return object; - var message = new $root.google.api.ResourceDescriptor(); - if (object.type != null) - message.type = String(object.type); - if (object.pattern) { - if (!Array.isArray(object.pattern)) - throw TypeError(".google.api.ResourceDescriptor.pattern: array expected"); - message.pattern = []; - for (var i = 0; i < object.pattern.length; ++i) - message.pattern[i] = String(object.pattern[i]); - } - if (object.nameField != null) - message.nameField = String(object.nameField); - switch (object.history) { - default: - if (typeof object.history === "number") { - message.history = object.history; - break; - } - break; - case "HISTORY_UNSPECIFIED": - case 0: - message.history = 0; - break; - case "ORIGINALLY_SINGLE_PATTERN": - case 1: - message.history = 1; - break; - case "FUTURE_MULTI_PATTERN": - case 2: - message.history = 2; - break; - } - if (object.plural != null) - message.plural = String(object.plural); - if (object.singular != null) - message.singular = String(object.singular); - if (object.style) { - if (!Array.isArray(object.style)) - throw TypeError(".google.api.ResourceDescriptor.style: array expected"); - message.style = []; - for (var i = 0; i < object.style.length; ++i) - switch (object.style[i]) { - default: - if (typeof object.style[i] === "number") { - message.style[i] = object.style[i]; - break; - } - case "STYLE_UNSPECIFIED": - case 0: - message.style[i] = 0; - break; - case "DECLARATIVE_FRIENDLY": - case 1: - message.style[i] = 1; - break; - } - } - return message; - }; - - /** - * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.api.ResourceDescriptor - * @static - * @param {google.api.ResourceDescriptor} message ResourceDescriptor - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ResourceDescriptor.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) { - object.pattern = []; - object.style = []; - } - if (options.defaults) { - object.type = ""; - object.nameField = ""; - object.history = options.enums === String ? "HISTORY_UNSPECIFIED" : 0; - object.plural = ""; - object.singular = ""; - } - if (message.type != null && message.hasOwnProperty("type")) - object.type = message.type; - if (message.pattern && message.pattern.length) { - object.pattern = []; - for (var j = 0; j < message.pattern.length; ++j) - object.pattern[j] = message.pattern[j]; - } - if (message.nameField != null && message.hasOwnProperty("nameField")) - object.nameField = message.nameField; - if (message.history != null && message.hasOwnProperty("history")) - object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] === undefined ? message.history : $root.google.api.ResourceDescriptor.History[message.history] : message.history; - if (message.plural != null && message.hasOwnProperty("plural")) - object.plural = message.plural; - if (message.singular != null && message.hasOwnProperty("singular")) - object.singular = message.singular; - if (message.style && message.style.length) { - object.style = []; - for (var j = 0; j < message.style.length; ++j) - object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] === undefined ? message.style[j] : $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; - } - return object; - }; - - /** - * Converts this ResourceDescriptor to JSON. 
- * @function toJSON - * @memberof google.api.ResourceDescriptor - * @instance - * @returns {Object.} JSON object - */ - ResourceDescriptor.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ResourceDescriptor - * @function getTypeUrl - * @memberof google.api.ResourceDescriptor - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ResourceDescriptor.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.ResourceDescriptor"; - }; - - /** - * History enum. - * @name google.api.ResourceDescriptor.History - * @enum {number} - * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value - * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value - * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value - */ - ResourceDescriptor.History = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "HISTORY_UNSPECIFIED"] = 0; - values[valuesById[1] = "ORIGINALLY_SINGLE_PATTERN"] = 1; - values[valuesById[2] = "FUTURE_MULTI_PATTERN"] = 2; - return values; - })(); - - /** - * Style enum. - * @name google.api.ResourceDescriptor.Style - * @enum {number} - * @property {number} STYLE_UNSPECIFIED=0 STYLE_UNSPECIFIED value - * @property {number} DECLARATIVE_FRIENDLY=1 DECLARATIVE_FRIENDLY value - */ - ResourceDescriptor.Style = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STYLE_UNSPECIFIED"] = 0; - values[valuesById[1] = "DECLARATIVE_FRIENDLY"] = 1; - return values; - })(); - - return ResourceDescriptor; - })(); - - api.ResourceReference = (function() { - - /** - * Properties of a ResourceReference. 
- * @memberof google.api - * @interface IResourceReference - * @property {string|null} [type] ResourceReference type - * @property {string|null} [childType] ResourceReference childType - */ - - /** - * Constructs a new ResourceReference. - * @memberof google.api - * @classdesc Represents a ResourceReference. - * @implements IResourceReference - * @constructor - * @param {google.api.IResourceReference=} [properties] Properties to set - */ - function ResourceReference(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ResourceReference type. - * @member {string} type - * @memberof google.api.ResourceReference - * @instance - */ - ResourceReference.prototype.type = ""; - - /** - * ResourceReference childType. - * @member {string} childType - * @memberof google.api.ResourceReference - * @instance - */ - ResourceReference.prototype.childType = ""; - - /** - * Creates a new ResourceReference instance using the specified properties. - * @function create - * @memberof google.api.ResourceReference - * @static - * @param {google.api.IResourceReference=} [properties] Properties to set - * @returns {google.api.ResourceReference} ResourceReference instance - */ - ResourceReference.create = function create(properties) { - return new ResourceReference(properties); - }; - - /** - * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. 
- * @function encode - * @memberof google.api.ResourceReference - * @static - * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ResourceReference.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.type != null && Object.hasOwnProperty.call(message, "type")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); - return writer; - }; - - /** - * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.ResourceReference - * @static - * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ResourceReference.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a ResourceReference message from the specified reader or buffer. - * @function decode - * @memberof google.api.ResourceReference - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.ResourceReference} ResourceReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ResourceReference.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.type = reader.string(); - break; - } - case 2: { - message.childType = reader.string(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a ResourceReference message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.ResourceReference - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.ResourceReference} ResourceReference - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ResourceReference.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a ResourceReference message. - * @function verify - * @memberof google.api.ResourceReference - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ResourceReference.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.type != null && message.hasOwnProperty("type")) - if (!$util.isString(message.type)) - return "type: string expected"; - if (message.childType != null && message.hasOwnProperty("childType")) - if (!$util.isString(message.childType)) - return "childType: string expected"; - return null; - }; - - /** - * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.api.ResourceReference - * @static - * @param {Object.} object Plain object - * @returns {google.api.ResourceReference} ResourceReference - */ - ResourceReference.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.ResourceReference) - return object; - var message = new $root.google.api.ResourceReference(); - if (object.type != null) - message.type = String(object.type); - if (object.childType != null) - message.childType = String(object.childType); - return message; - }; - - /** - * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. - * @function toObject - * @memberof google.api.ResourceReference - * @static - * @param {google.api.ResourceReference} message ResourceReference - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ResourceReference.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.type = ""; - object.childType = ""; - } - if (message.type != null && message.hasOwnProperty("type")) - object.type = message.type; - if (message.childType != null && message.hasOwnProperty("childType")) - object.childType = message.childType; - return object; - }; - - /** - * Converts this ResourceReference to JSON. 
- * @function toJSON - * @memberof google.api.ResourceReference - * @instance - * @returns {Object.} JSON object - */ - ResourceReference.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ResourceReference - * @function getTypeUrl - * @memberof google.api.ResourceReference - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ResourceReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.ResourceReference"; - }; - - return ResourceReference; - })(); - - return api; - })(); - - google.rpc = (function() { - - /** - * Namespace rpc. - * @memberof google - * @namespace - */ - var rpc = {}; - - rpc.Status = (function() { - - /** - * Properties of a Status. - * @memberof google.rpc - * @interface IStatus - * @property {number|null} [code] Status code - * @property {string|null} [message] Status message - * @property {Array.|null} [details] Status details - */ - - /** - * Constructs a new Status. - * @memberof google.rpc - * @classdesc Represents a Status. - * @implements IStatus - * @constructor - * @param {google.rpc.IStatus=} [properties] Properties to set - */ - function Status(properties) { - this.details = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Status code. - * @member {number} code - * @memberof google.rpc.Status - * @instance - */ - Status.prototype.code = 0; - - /** - * Status message. - * @member {string} message - * @memberof google.rpc.Status - * @instance - */ - Status.prototype.message = ""; - - /** - * Status details. 
- * @member {Array.} details - * @memberof google.rpc.Status - * @instance - */ - Status.prototype.details = $util.emptyArray; - - /** - * Creates a new Status instance using the specified properties. - * @function create - * @memberof google.rpc.Status - * @static - * @param {google.rpc.IStatus=} [properties] Properties to set - * @returns {google.rpc.Status} Status instance - */ - Status.create = function create(properties) { - return new Status(properties); - }; - - /** - * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. - * @function encode - * @memberof google.rpc.Status - * @static - * @param {google.rpc.IStatus} message Status message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Status.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.code != null && Object.hasOwnProperty.call(message, "code")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); - if (message.message != null && Object.hasOwnProperty.call(message, "message")) - writer.uint32(/* id 2, wireType 2 =*/18).string(message.message); - if (message.details != null && message.details.length) - for (var i = 0; i < message.details.length; ++i) - $root.google.protobuf.Any.encode(message.details[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - return writer; - }; - - /** - * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. 
- * @function encodeDelimited - * @memberof google.rpc.Status - * @static - * @param {google.rpc.IStatus} message Status message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Status.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Status message from the specified reader or buffer. - * @function decode - * @memberof google.rpc.Status - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.rpc.Status} Status - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Status.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.rpc.Status(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - message.code = reader.int32(); - break; - } - case 2: { - message.message = reader.string(); - break; - } - case 3: { - if (!(message.details && message.details.length)) - message.details = []; - message.details.push($root.google.protobuf.Any.decode(reader, reader.uint32())); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Status message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.rpc.Status - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.rpc.Status} Status - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Status.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Status message. - * @function verify - * @memberof google.rpc.Status - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Status.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.code != null && message.hasOwnProperty("code")) - if (!$util.isInteger(message.code)) - return "code: integer expected"; - if (message.message != null && message.hasOwnProperty("message")) - if (!$util.isString(message.message)) - return "message: string expected"; - if (message.details != null && message.hasOwnProperty("details")) { - if (!Array.isArray(message.details)) - return "details: array expected"; - for (var i = 0; i < message.details.length; ++i) { - var error = $root.google.protobuf.Any.verify(message.details[i]); - if (error) - return "details." + error; - } - } - return null; - }; - - /** - * Creates a Status message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.rpc.Status - * @static - * @param {Object.} object Plain object - * @returns {google.rpc.Status} Status - */ - Status.fromObject = function fromObject(object) { - if (object instanceof $root.google.rpc.Status) - return object; - var message = new $root.google.rpc.Status(); - if (object.code != null) - message.code = object.code | 0; - if (object.message != null) - message.message = String(object.message); - if (object.details) { - if (!Array.isArray(object.details)) - throw TypeError(".google.rpc.Status.details: array expected"); - message.details = []; - for (var i = 0; i < object.details.length; ++i) { - if (typeof object.details[i] !== "object") - throw TypeError(".google.rpc.Status.details: object expected"); - message.details[i] = $root.google.protobuf.Any.fromObject(object.details[i]); - } - } - return message; - }; - - /** - * Creates a plain object from a Status message. Also converts values to other types if specified. - * @function toObject - * @memberof google.rpc.Status - * @static - * @param {google.rpc.Status} message Status - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Status.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.details = []; - if (options.defaults) { - object.code = 0; - object.message = ""; - } - if (message.code != null && message.hasOwnProperty("code")) - object.code = message.code; - if (message.message != null && message.hasOwnProperty("message")) - object.message = message.message; - if (message.details && message.details.length) { - object.details = []; - for (var j = 0; j < message.details.length; ++j) - object.details[j] = $root.google.protobuf.Any.toObject(message.details[j], options); - } - return object; - }; - - /** - * Converts this Status to JSON. 
- * @function toJSON - * @memberof google.rpc.Status - * @instance - * @returns {Object.} JSON object - */ - Status.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Status - * @function getTypeUrl - * @memberof google.rpc.Status - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Status.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.rpc.Status"; - }; - - return Status; - })(); - - return rpc; - })(); - - return google; - })(); - - return $root; -}); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json deleted file mode 100644 index e8388095ced..00000000000 --- a/handwritten/bigquery-storage/protos/protos.json +++ /dev/null @@ -1,2817 +0,0 @@ -{ - "nested": { - "google": { - "nested": { - "cloud": { - "nested": { - "bigquery": { - "nested": { - "storage": { - "nested": { - "v1": { - "options": { - "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", - "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", - "java_package": "com.google.cloud.bigquery.storage.v1", - "java_multiple_files": true, - "java_outer_classname": "TableProto", - "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1", - "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", - "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" - }, - "nested": { - "_columnName": { - "oneof": [ - "columnName" - ] - }, - "columnName": { - "type": "string", - "id": 454943157, - "extend": "google.protobuf.FieldOptions", - "options": { - "proto3_optional": true - } - }, - "ArrowSchema": { - "fields": { - "serializedSchema": { - 
"type": "bytes", - "id": 1 - } - } - }, - "ArrowRecordBatch": { - "fields": { - "serializedRecordBatch": { - "type": "bytes", - "id": 1 - }, - "rowCount": { - "type": "int64", - "id": 2, - "options": { - "deprecated": true - } - } - } - }, - "ArrowSerializationOptions": { - "fields": { - "bufferCompression": { - "type": "CompressionCodec", - "id": 2 - } - }, - "nested": { - "CompressionCodec": { - "values": { - "COMPRESSION_UNSPECIFIED": 0, - "LZ4_FRAME": 1, - "ZSTD": 2 - } - } - } - }, - "AvroSchema": { - "fields": { - "schema": { - "type": "string", - "id": 1 - } - } - }, - "AvroRows": { - "fields": { - "serializedBinaryRows": { - "type": "bytes", - "id": 1 - }, - "rowCount": { - "type": "int64", - "id": 2, - "options": { - "deprecated": true - } - } - } - }, - "AvroSerializationOptions": { - "fields": { - "enableDisplayNameAttribute": { - "type": "bool", - "id": 1 - } - } - }, - "ProtoSchema": { - "fields": { - "protoDescriptor": { - "type": "google.protobuf.DescriptorProto", - "id": 1 - } - } - }, - "ProtoRows": { - "fields": { - "serializedRows": { - "rule": "repeated", - "type": "bytes", - "id": 1 - } - } - }, - "BigQueryRead": { - "options": { - "(google.api.default_host)": "bigquerystorage.googleapis.com", - "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" - }, - "methods": { - "CreateReadSession": { - "requestType": "CreateReadSessionRequest", - "responseType": "ReadSession", - "options": { - "(google.api.http).post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "parent,read_session,max_stream_count" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "parent,read_session,max_stream_count" - } - ] - }, - "ReadRows": { - "requestType": "ReadRowsRequest", - "responseType": 
"ReadRowsResponse", - "responseStream": true, - "options": { - "(google.api.http).get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}", - "(google.api.method_signature)": "read_stream,offset" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" - } - }, - { - "(google.api.method_signature)": "read_stream,offset" - } - ] - }, - "SplitReadStream": { - "requestType": "SplitReadStreamRequest", - "responseType": "SplitReadStreamResponse", - "options": { - "(google.api.http).get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" - } - } - ] - } - } - }, - "BigQueryWrite": { - "options": { - "(google.api.default_host)": "bigquerystorage.googleapis.com", - "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platform" - }, - "methods": { - "CreateWriteStream": { - "requestType": "CreateWriteStreamRequest", - "responseType": "WriteStream", - "options": { - "(google.api.http).post": "/v1/{parent=projects/*/datasets/*/tables/*}", - "(google.api.http).body": "write_stream", - "(google.api.method_signature)": "parent,write_stream" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1/{parent=projects/*/datasets/*/tables/*}", - "body": "write_stream" - } - }, - { - "(google.api.method_signature)": "parent,write_stream" - } - ] - }, - "AppendRows": { - "requestType": "AppendRowsRequest", - "requestStream": true, - "responseType": "AppendRowsResponse", - "responseStream": true, - "options": { - "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "write_stream" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": 
"/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "write_stream" - } - ] - }, - "GetWriteStream": { - "requestType": "GetWriteStreamRequest", - "responseType": "WriteStream", - "options": { - "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "name" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "name" - } - ] - }, - "FinalizeWriteStream": { - "requestType": "FinalizeWriteStreamRequest", - "responseType": "FinalizeWriteStreamResponse", - "options": { - "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "name" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "name" - } - ] - }, - "BatchCommitWriteStreams": { - "requestType": "BatchCommitWriteStreamsRequest", - "responseType": "BatchCommitWriteStreamsResponse", - "options": { - "(google.api.http).get": "/v1/{parent=projects/*/datasets/*/tables/*}", - "(google.api.method_signature)": "parent" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "get": "/v1/{parent=projects/*/datasets/*/tables/*}" - } - }, - { - "(google.api.method_signature)": "parent" - } - ] - }, - "FlushRows": { - "requestType": "FlushRowsRequest", - "responseType": "FlushRowsResponse", - "options": { - "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "write_stream" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", - "body": "*" - } 
- }, - { - "(google.api.method_signature)": "write_stream" - } - ] - } - } - }, - "CreateReadSessionRequest": { - "fields": { - "parent": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" - } - }, - "readSession": { - "type": "ReadSession", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - }, - "maxStreamCount": { - "type": "int32", - "id": 3 - }, - "preferredMinStreamCount": { - "type": "int32", - "id": 4 - } - } - }, - "ReadRowsRequest": { - "fields": { - "readStream": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" - } - }, - "offset": { - "type": "int64", - "id": 2 - } - } - }, - "ThrottleState": { - "fields": { - "throttlePercent": { - "type": "int32", - "id": 1 - } - } - }, - "StreamStats": { - "fields": { - "progress": { - "type": "Progress", - "id": 2 - } - }, - "nested": { - "Progress": { - "fields": { - "atResponseStart": { - "type": "double", - "id": 1 - }, - "atResponseEnd": { - "type": "double", - "id": 2 - } - } - } - } - }, - "ReadRowsResponse": { - "oneofs": { - "rows": { - "oneof": [ - "avroRows", - "arrowRecordBatch" - ] - }, - "schema": { - "oneof": [ - "avroSchema", - "arrowSchema" - ] - } - }, - "fields": { - "avroRows": { - "type": "AvroRows", - "id": 3 - }, - "arrowRecordBatch": { - "type": "ArrowRecordBatch", - "id": 4 - }, - "rowCount": { - "type": "int64", - "id": 6 - }, - "stats": { - "type": "StreamStats", - "id": 2 - }, - "throttleState": { - "type": "ThrottleState", - "id": 5 - }, - "avroSchema": { - "type": "AvroSchema", - "id": 7, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "arrowSchema": { - "type": "ArrowSchema", - "id": 8, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - } - } - }, - 
"SplitReadStreamRequest": { - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" - } - }, - "fraction": { - "type": "double", - "id": 2 - } - } - }, - "SplitReadStreamResponse": { - "fields": { - "primaryStream": { - "type": "ReadStream", - "id": 1 - }, - "remainderStream": { - "type": "ReadStream", - "id": 2 - } - } - }, - "CreateWriteStreamRequest": { - "fields": { - "parent": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" - } - }, - "writeStream": { - "type": "WriteStream", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - } - } - }, - "AppendRowsRequest": { - "oneofs": { - "rows": { - "oneof": [ - "protoRows" - ] - } - }, - "fields": { - "writeStream": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" - } - }, - "offset": { - "type": "google.protobuf.Int64Value", - "id": 2 - }, - "protoRows": { - "type": "ProtoData", - "id": 4 - }, - "traceId": { - "type": "string", - "id": 6 - } - }, - "nested": { - "ProtoData": { - "fields": { - "writerSchema": { - "type": "ProtoSchema", - "id": 1 - }, - "rows": { - "type": "ProtoRows", - "id": 2 - } - } - } - } - }, - "AppendRowsResponse": { - "oneofs": { - "response": { - "oneof": [ - "appendResult", - "error" - ] - } - }, - "fields": { - "appendResult": { - "type": "AppendResult", - "id": 1 - }, - "error": { - "type": "google.rpc.Status", - "id": 2 - }, - "updatedSchema": { - "type": "TableSchema", - "id": 3 - }, - "rowErrors": { - "rule": "repeated", - "type": "RowError", - "id": 4 - }, - "writeStream": { - "type": "string", - "id": 5 - } - }, - "nested": { - "AppendResult": { - "fields": { - "offset": { 
- "type": "google.protobuf.Int64Value", - "id": 1 - } - } - } - } - }, - "GetWriteStreamRequest": { - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" - } - }, - "view": { - "type": "WriteStreamView", - "id": 3 - } - } - }, - "BatchCommitWriteStreamsRequest": { - "fields": { - "parent": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" - } - }, - "writeStreams": { - "rule": "repeated", - "type": "string", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - } - } - }, - "BatchCommitWriteStreamsResponse": { - "fields": { - "commitTime": { - "type": "google.protobuf.Timestamp", - "id": 1 - }, - "streamErrors": { - "rule": "repeated", - "type": "StorageError", - "id": 2 - } - } - }, - "FinalizeWriteStreamRequest": { - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" - } - } - } - }, - "FinalizeWriteStreamResponse": { - "fields": { - "rowCount": { - "type": "int64", - "id": 1 - } - } - }, - "FlushRowsRequest": { - "fields": { - "writeStream": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" - } - }, - "offset": { - "type": "google.protobuf.Int64Value", - "id": 2 - } - } - }, - "FlushRowsResponse": { - "fields": { - "offset": { - "type": "int64", - "id": 1 - } - } - }, - "StorageError": { - "fields": { - "code": { - "type": "StorageErrorCode", - "id": 1 - }, - "entity": { - "type": "string", - "id": 2 - }, - "errorMessage": { - "type": "string", - "id": 3 - } - }, - "nested": { - "StorageErrorCode": { - 
"values": { - "STORAGE_ERROR_CODE_UNSPECIFIED": 0, - "TABLE_NOT_FOUND": 1, - "STREAM_ALREADY_COMMITTED": 2, - "STREAM_NOT_FOUND": 3, - "INVALID_STREAM_TYPE": 4, - "INVALID_STREAM_STATE": 5, - "STREAM_FINALIZED": 6, - "SCHEMA_MISMATCH_EXTRA_FIELDS": 7, - "OFFSET_ALREADY_EXISTS": 8, - "OFFSET_OUT_OF_RANGE": 9 - } - } - } - }, - "RowError": { - "fields": { - "index": { - "type": "int64", - "id": 1 - }, - "code": { - "type": "RowErrorCode", - "id": 2 - }, - "message": { - "type": "string", - "id": 3 - } - }, - "nested": { - "RowErrorCode": { - "values": { - "ROW_ERROR_CODE_UNSPECIFIED": 0, - "FIELDS_ERROR": 1 - } - } - } - }, - "DataFormat": { - "values": { - "DATA_FORMAT_UNSPECIFIED": 0, - "AVRO": 1, - "ARROW": 2 - } - }, - "ReadSession": { - "options": { - "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", - "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" - }, - "oneofs": { - "schema": { - "oneof": [ - "avroSchema", - "arrowSchema" - ] - } - }, - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "expireTime": { - "type": "google.protobuf.Timestamp", - "id": 2, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "dataFormat": { - "type": "DataFormat", - "id": 3, - "options": { - "(google.api.field_behavior)": "IMMUTABLE" - } - }, - "avroSchema": { - "type": "AvroSchema", - "id": 4, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "arrowSchema": { - "type": "ArrowSchema", - "id": 5, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "table": { - "type": "string", - "id": 6, - "options": { - "(google.api.field_behavior)": "IMMUTABLE", - "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" - } - }, - "tableModifiers": { - "type": "TableModifiers", - "id": 7, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - 
"readOptions": { - "type": "TableReadOptions", - "id": 8, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "streams": { - "rule": "repeated", - "type": "ReadStream", - "id": 10, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "estimatedTotalBytesScanned": { - "type": "int64", - "id": 12, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "traceId": { - "type": "string", - "id": 13, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - } - }, - "nested": { - "TableModifiers": { - "fields": { - "snapshotTime": { - "type": "google.protobuf.Timestamp", - "id": 1 - } - } - }, - "TableReadOptions": { - "oneofs": { - "outputFormatSerializationOptions": { - "oneof": [ - "arrowSerializationOptions", - "avroSerializationOptions" - ] - } - }, - "fields": { - "selectedFields": { - "rule": "repeated", - "type": "string", - "id": 1 - }, - "rowRestriction": { - "type": "string", - "id": 2 - }, - "arrowSerializationOptions": { - "type": "ArrowSerializationOptions", - "id": 3, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "avroSerializationOptions": { - "type": "AvroSerializationOptions", - "id": 4, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - } - } - } - } - }, - "ReadStream": { - "options": { - "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream", - "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" - }, - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - } - } - }, - "WriteStreamView": { - "values": { - "WRITE_STREAM_VIEW_UNSPECIFIED": 0, - "BASIC": 1, - "FULL": 2 - } - }, - "WriteStream": { - "options": { - "(google.api.resource).type": "bigquerystorage.googleapis.com/WriteStream", - "(google.api.resource).pattern": "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" - }, - "fields": { 
- "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "type": { - "type": "Type", - "id": 2, - "options": { - "(google.api.field_behavior)": "IMMUTABLE" - } - }, - "createTime": { - "type": "google.protobuf.Timestamp", - "id": 3, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "commitTime": { - "type": "google.protobuf.Timestamp", - "id": 4, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "tableSchema": { - "type": "TableSchema", - "id": 5, - "options": { - "(google.api.field_behavior)": "OUTPUT_ONLY" - } - }, - "writeMode": { - "type": "WriteMode", - "id": 7, - "options": { - "(google.api.field_behavior)": "IMMUTABLE" - } - }, - "location": { - "type": "string", - "id": 8, - "options": { - "(google.api.field_behavior)": "IMMUTABLE" - } - } - }, - "nested": { - "Type": { - "values": { - "TYPE_UNSPECIFIED": 0, - "COMMITTED": 1, - "PENDING": 2, - "BUFFERED": 3 - } - }, - "WriteMode": { - "values": { - "WRITE_MODE_UNSPECIFIED": 0, - "INSERT": 1 - } - } - } - }, - "TableSchema": { - "fields": { - "fields": { - "rule": "repeated", - "type": "TableFieldSchema", - "id": 1 - } - } - }, - "TableFieldSchema": { - "fields": { - "name": { - "type": "string", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - }, - "type": { - "type": "Type", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - }, - "mode": { - "type": "Mode", - "id": 3, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "fields": { - "rule": "repeated", - "type": "TableFieldSchema", - "id": 4, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "description": { - "type": "string", - "id": 6, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "maxLength": { - "type": "int64", - "id": 7, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "precision": { - "type": "int64", - "id": 8, 
- "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - }, - "scale": { - "type": "int64", - "id": 9, - "options": { - "(google.api.field_behavior)": "OPTIONAL" - } - } - }, - "nested": { - "Type": { - "values": { - "TYPE_UNSPECIFIED": 0, - "STRING": 1, - "INT64": 2, - "DOUBLE": 3, - "STRUCT": 4, - "BYTES": 5, - "BOOL": 6, - "TIMESTAMP": 7, - "DATE": 8, - "TIME": 9, - "DATETIME": 10, - "GEOGRAPHY": 11, - "NUMERIC": 12, - "BIGNUMERIC": 13, - "INTERVAL": 14, - "JSON": 15 - } - }, - "Mode": { - "values": { - "MODE_UNSPECIFIED": 0, - "NULLABLE": 1, - "REQUIRED": 2, - "REPEATED": 3 - } - } - } - } - } - }, - "v1beta1": { - "options": { - "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", - "java_outer_classname": "TableReferenceProto", - "java_package": "com.google.cloud.bigquery.storage.v1beta1" - }, - "nested": { - "ArrowSchema": { - "fields": { - "serializedSchema": { - "type": "bytes", - "id": 1 - } - } - }, - "ArrowRecordBatch": { - "fields": { - "serializedRecordBatch": { - "type": "bytes", - "id": 1 - }, - "rowCount": { - "type": "int64", - "id": 2 - } - } - }, - "AvroSchema": { - "fields": { - "schema": { - "type": "string", - "id": 1 - } - } - }, - "AvroRows": { - "fields": { - "serializedBinaryRows": { - "type": "bytes", - "id": 1 - }, - "rowCount": { - "type": "int64", - "id": 2 - } - } - }, - "TableReadOptions": { - "fields": { - "selectedFields": { - "rule": "repeated", - "type": "string", - "id": 1 - }, - "rowRestriction": { - "type": "string", - "id": 2 - } - } - }, - "BigQueryStorage": { - "options": { - "(google.api.default_host)": "bigquerystorage.googleapis.com", - "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" - }, - "methods": { - "CreateReadSession": { - "requestType": "CreateReadSessionRequest", - "responseType": "ReadSession", - "options": { - "(google.api.http).post": "/v1beta1/{table_reference.project_id=projects/*}", - 
"(google.api.http).body": "*", - "(google.api.http).additional_bindings.post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", - "(google.api.http).additional_bindings.body": "*", - "(google.api.method_signature)": "table_reference,parent,requested_streams" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1beta1/{table_reference.project_id=projects/*}", - "body": "*", - "additional_bindings": { - "post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", - "body": "*" - } - } - }, - { - "(google.api.method_signature)": "table_reference,parent,requested_streams" - } - ] - }, - "ReadRows": { - "requestType": "ReadRowsRequest", - "responseType": "ReadRowsResponse", - "responseStream": true, - "options": { - "(google.api.http).get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}", - "(google.api.method_signature)": "read_position" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}" - } - }, - { - "(google.api.method_signature)": "read_position" - } - ] - }, - "BatchCreateReadSessionStreams": { - "requestType": "BatchCreateReadSessionStreamsRequest", - "responseType": "BatchCreateReadSessionStreamsResponse", - "options": { - "(google.api.http).post": "/v1beta1/{session.name=projects/*/sessions/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "session,requested_streams" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": "/v1beta1/{session.name=projects/*/sessions/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "session,requested_streams" - } - ] - }, - "FinalizeStream": { - "requestType": "FinalizeStreamRequest", - "responseType": "google.protobuf.Empty", - "options": { - "(google.api.http).post": "/v1beta1/{stream.name=projects/*/streams/*}", - "(google.api.http).body": "*", - "(google.api.method_signature)": "stream" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "post": 
"/v1beta1/{stream.name=projects/*/streams/*}", - "body": "*" - } - }, - { - "(google.api.method_signature)": "stream" - } - ] - }, - "SplitReadStream": { - "requestType": "SplitReadStreamRequest", - "responseType": "SplitReadStreamResponse", - "options": { - "(google.api.http).get": "/v1beta1/{original_stream.name=projects/*/streams/*}", - "(google.api.method_signature)": "original_stream" - }, - "parsedOptions": [ - { - "(google.api.http)": { - "get": "/v1beta1/{original_stream.name=projects/*/streams/*}" - } - }, - { - "(google.api.method_signature)": "original_stream" - } - ] - } - } - }, - "Stream": { - "options": { - "(google.api.resource).type": "bigquerystorage.googleapis.com/Stream", - "(google.api.resource).pattern": "projects/{project}/locations/{location}/streams/{stream}" - }, - "fields": { - "name": { - "type": "string", - "id": 1 - } - } - }, - "StreamPosition": { - "fields": { - "stream": { - "type": "Stream", - "id": 1 - }, - "offset": { - "type": "int64", - "id": 2 - } - } - }, - "ReadSession": { - "options": { - "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", - "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" - }, - "oneofs": { - "schema": { - "oneof": [ - "avroSchema", - "arrowSchema" - ] - } - }, - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "expireTime": { - "type": "google.protobuf.Timestamp", - "id": 2 - }, - "avroSchema": { - "type": "AvroSchema", - "id": 5 - }, - "arrowSchema": { - "type": "ArrowSchema", - "id": 6 - }, - "streams": { - "rule": "repeated", - "type": "Stream", - "id": 4 - }, - "tableReference": { - "type": "TableReference", - "id": 7 - }, - "tableModifiers": { - "type": "TableModifiers", - "id": 8 - }, - "shardingStrategy": { - "type": "ShardingStrategy", - "id": 9 - } - } - }, - "CreateReadSessionRequest": { - "fields": { - "tableReference": { - "type": "TableReference", - "id": 1, - "options": { - "(google.api.field_behavior)": 
"REQUIRED" - } - }, - "parent": { - "type": "string", - "id": 6, - "options": { - "(google.api.field_behavior)": "REQUIRED", - "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" - } - }, - "tableModifiers": { - "type": "TableModifiers", - "id": 2 - }, - "requestedStreams": { - "type": "int32", - "id": 3 - }, - "readOptions": { - "type": "TableReadOptions", - "id": 4 - }, - "format": { - "type": "DataFormat", - "id": 5 - }, - "shardingStrategy": { - "type": "ShardingStrategy", - "id": 7 - } - } - }, - "DataFormat": { - "values": { - "DATA_FORMAT_UNSPECIFIED": 0, - "AVRO": 1, - "ARROW": 3 - } - }, - "ShardingStrategy": { - "values": { - "SHARDING_STRATEGY_UNSPECIFIED": 0, - "LIQUID": 1, - "BALANCED": 2 - } - }, - "ReadRowsRequest": { - "fields": { - "readPosition": { - "type": "StreamPosition", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - } - } - }, - "StreamStatus": { - "fields": { - "estimatedRowCount": { - "type": "int64", - "id": 1 - }, - "fractionConsumed": { - "type": "float", - "id": 2 - }, - "progress": { - "type": "Progress", - "id": 4 - }, - "isSplittable": { - "type": "bool", - "id": 3 - } - } - }, - "Progress": { - "fields": { - "atResponseStart": { - "type": "float", - "id": 1 - }, - "atResponseEnd": { - "type": "float", - "id": 2 - } - } - }, - "ThrottleStatus": { - "fields": { - "throttlePercent": { - "type": "int32", - "id": 1 - } - } - }, - "ReadRowsResponse": { - "oneofs": { - "rows": { - "oneof": [ - "avroRows", - "arrowRecordBatch" - ] - } - }, - "fields": { - "avroRows": { - "type": "AvroRows", - "id": 3 - }, - "arrowRecordBatch": { - "type": "ArrowRecordBatch", - "id": 4 - }, - "rowCount": { - "type": "int64", - "id": 6 - }, - "status": { - "type": "StreamStatus", - "id": 2 - }, - "throttleStatus": { - "type": "ThrottleStatus", - "id": 5 - } - } - }, - "BatchCreateReadSessionStreamsRequest": { - "fields": { - "session": { - "type": "ReadSession", - "id": 1, - "options": { - 
"(google.api.field_behavior)": "REQUIRED" - } - }, - "requestedStreams": { - "type": "int32", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - } - } - }, - "BatchCreateReadSessionStreamsResponse": { - "fields": { - "streams": { - "rule": "repeated", - "type": "Stream", - "id": 1 - } - } - }, - "FinalizeStreamRequest": { - "fields": { - "stream": { - "type": "Stream", - "id": 2, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - } - } - }, - "SplitReadStreamRequest": { - "fields": { - "originalStream": { - "type": "Stream", - "id": 1, - "options": { - "(google.api.field_behavior)": "REQUIRED" - } - }, - "fraction": { - "type": "float", - "id": 2 - } - } - }, - "SplitReadStreamResponse": { - "fields": { - "primaryStream": { - "type": "Stream", - "id": 1 - }, - "remainderStream": { - "type": "Stream", - "id": 2 - } - } - }, - "TableReference": { - "fields": { - "projectId": { - "type": "string", - "id": 1 - }, - "datasetId": { - "type": "string", - "id": 2 - }, - "tableId": { - "type": "string", - "id": 3 - } - } - }, - "TableModifiers": { - "fields": { - "snapshotTime": { - "type": "google.protobuf.Timestamp", - "id": 1 - } - } - } - } - } - } - } - } - } - } - }, - "protobuf": { - "options": { - "go_package": "google.golang.org/protobuf/types/descriptorpb", - "java_package": "com.google.protobuf", - "java_outer_classname": "DescriptorProtos", - "csharp_namespace": "Google.Protobuf.Reflection", - "objc_class_prefix": "GPB", - "cc_enable_arenas": true, - "optimize_for": "SPEED" - }, - "nested": { - "FileDescriptorSet": { - "fields": { - "file": { - "rule": "repeated", - "type": "FileDescriptorProto", - "id": 1 - } - } - }, - "FileDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "package": { - "type": "string", - "id": 2 - }, - "dependency": { - "rule": "repeated", - "type": "string", - "id": 3 - }, - "publicDependency": { - "rule": "repeated", - "type": "int32", - "id": 10, - "options": { - 
"packed": false - } - }, - "weakDependency": { - "rule": "repeated", - "type": "int32", - "id": 11, - "options": { - "packed": false - } - }, - "messageType": { - "rule": "repeated", - "type": "DescriptorProto", - "id": 4 - }, - "enumType": { - "rule": "repeated", - "type": "EnumDescriptorProto", - "id": 5 - }, - "service": { - "rule": "repeated", - "type": "ServiceDescriptorProto", - "id": 6 - }, - "extension": { - "rule": "repeated", - "type": "FieldDescriptorProto", - "id": 7 - }, - "options": { - "type": "FileOptions", - "id": 8 - }, - "sourceCodeInfo": { - "type": "SourceCodeInfo", - "id": 9 - }, - "syntax": { - "type": "string", - "id": 12 - }, - "edition": { - "type": "string", - "id": 13 - } - } - }, - "DescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "field": { - "rule": "repeated", - "type": "FieldDescriptorProto", - "id": 2 - }, - "extension": { - "rule": "repeated", - "type": "FieldDescriptorProto", - "id": 6 - }, - "nestedType": { - "rule": "repeated", - "type": "DescriptorProto", - "id": 3 - }, - "enumType": { - "rule": "repeated", - "type": "EnumDescriptorProto", - "id": 4 - }, - "extensionRange": { - "rule": "repeated", - "type": "ExtensionRange", - "id": 5 - }, - "oneofDecl": { - "rule": "repeated", - "type": "OneofDescriptorProto", - "id": 8 - }, - "options": { - "type": "MessageOptions", - "id": 7 - }, - "reservedRange": { - "rule": "repeated", - "type": "ReservedRange", - "id": 9 - }, - "reservedName": { - "rule": "repeated", - "type": "string", - "id": 10 - } - }, - "nested": { - "ExtensionRange": { - "fields": { - "start": { - "type": "int32", - "id": 1 - }, - "end": { - "type": "int32", - "id": 2 - }, - "options": { - "type": "ExtensionRangeOptions", - "id": 3 - } - } - }, - "ReservedRange": { - "fields": { - "start": { - "type": "int32", - "id": 1 - }, - "end": { - "type": "int32", - "id": 2 - } - } - } - } - }, - "ExtensionRangeOptions": { - "fields": { - "uninterpretedOption": { - "rule": "repeated", - 
"type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ] - }, - "FieldDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "number": { - "type": "int32", - "id": 3 - }, - "label": { - "type": "Label", - "id": 4 - }, - "type": { - "type": "Type", - "id": 5 - }, - "typeName": { - "type": "string", - "id": 6 - }, - "extendee": { - "type": "string", - "id": 2 - }, - "defaultValue": { - "type": "string", - "id": 7 - }, - "oneofIndex": { - "type": "int32", - "id": 9 - }, - "jsonName": { - "type": "string", - "id": 10 - }, - "options": { - "type": "FieldOptions", - "id": 8 - }, - "proto3Optional": { - "type": "bool", - "id": 17 - } - }, - "nested": { - "Type": { - "values": { - "TYPE_DOUBLE": 1, - "TYPE_FLOAT": 2, - "TYPE_INT64": 3, - "TYPE_UINT64": 4, - "TYPE_INT32": 5, - "TYPE_FIXED64": 6, - "TYPE_FIXED32": 7, - "TYPE_BOOL": 8, - "TYPE_STRING": 9, - "TYPE_GROUP": 10, - "TYPE_MESSAGE": 11, - "TYPE_BYTES": 12, - "TYPE_UINT32": 13, - "TYPE_ENUM": 14, - "TYPE_SFIXED32": 15, - "TYPE_SFIXED64": 16, - "TYPE_SINT32": 17, - "TYPE_SINT64": 18 - } - }, - "Label": { - "values": { - "LABEL_OPTIONAL": 1, - "LABEL_REQUIRED": 2, - "LABEL_REPEATED": 3 - } - } - } - }, - "OneofDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "options": { - "type": "OneofOptions", - "id": 2 - } - } - }, - "EnumDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "value": { - "rule": "repeated", - "type": "EnumValueDescriptorProto", - "id": 2 - }, - "options": { - "type": "EnumOptions", - "id": 3 - }, - "reservedRange": { - "rule": "repeated", - "type": "EnumReservedRange", - "id": 4 - }, - "reservedName": { - "rule": "repeated", - "type": "string", - "id": 5 - } - }, - "nested": { - "EnumReservedRange": { - "fields": { - "start": { - "type": "int32", - "id": 1 - }, - "end": { - "type": "int32", - "id": 2 - } - } - } - } - }, - "EnumValueDescriptorProto": { - "fields": 
{ - "name": { - "type": "string", - "id": 1 - }, - "number": { - "type": "int32", - "id": 2 - }, - "options": { - "type": "EnumValueOptions", - "id": 3 - } - } - }, - "ServiceDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "method": { - "rule": "repeated", - "type": "MethodDescriptorProto", - "id": 2 - }, - "options": { - "type": "ServiceOptions", - "id": 3 - } - } - }, - "MethodDescriptorProto": { - "fields": { - "name": { - "type": "string", - "id": 1 - }, - "inputType": { - "type": "string", - "id": 2 - }, - "outputType": { - "type": "string", - "id": 3 - }, - "options": { - "type": "MethodOptions", - "id": 4 - }, - "clientStreaming": { - "type": "bool", - "id": 5, - "options": { - "default": false - } - }, - "serverStreaming": { - "type": "bool", - "id": 6, - "options": { - "default": false - } - } - } - }, - "FileOptions": { - "fields": { - "javaPackage": { - "type": "string", - "id": 1 - }, - "javaOuterClassname": { - "type": "string", - "id": 8 - }, - "javaMultipleFiles": { - "type": "bool", - "id": 10, - "options": { - "default": false - } - }, - "javaGenerateEqualsAndHash": { - "type": "bool", - "id": 20, - "options": { - "deprecated": true - } - }, - "javaStringCheckUtf8": { - "type": "bool", - "id": 27, - "options": { - "default": false - } - }, - "optimizeFor": { - "type": "OptimizeMode", - "id": 9, - "options": { - "default": "SPEED" - } - }, - "goPackage": { - "type": "string", - "id": 11 - }, - "ccGenericServices": { - "type": "bool", - "id": 16, - "options": { - "default": false - } - }, - "javaGenericServices": { - "type": "bool", - "id": 17, - "options": { - "default": false - } - }, - "pyGenericServices": { - "type": "bool", - "id": 18, - "options": { - "default": false - } - }, - "phpGenericServices": { - "type": "bool", - "id": 42, - "options": { - "default": false - } - }, - "deprecated": { - "type": "bool", - "id": 23, - "options": { - "default": false - } - }, - "ccEnableArenas": { - "type": "bool", - "id": 
31, - "options": { - "default": true - } - }, - "objcClassPrefix": { - "type": "string", - "id": 36 - }, - "csharpNamespace": { - "type": "string", - "id": 37 - }, - "swiftPrefix": { - "type": "string", - "id": 39 - }, - "phpClassPrefix": { - "type": "string", - "id": 40 - }, - "phpNamespace": { - "type": "string", - "id": 41 - }, - "phpMetadataNamespace": { - "type": "string", - "id": 44 - }, - "rubyPackage": { - "type": "string", - "id": 45 - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ], - "reserved": [ - [ - 38, - 38 - ] - ], - "nested": { - "OptimizeMode": { - "values": { - "SPEED": 1, - "CODE_SIZE": 2, - "LITE_RUNTIME": 3 - } - } - } - }, - "MessageOptions": { - "fields": { - "messageSetWireFormat": { - "type": "bool", - "id": 1, - "options": { - "default": false - } - }, - "noStandardDescriptorAccessor": { - "type": "bool", - "id": 2, - "options": { - "default": false - } - }, - "deprecated": { - "type": "bool", - "id": 3, - "options": { - "default": false - } - }, - "mapEntry": { - "type": "bool", - "id": 7 - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ], - "reserved": [ - [ - 4, - 4 - ], - [ - 5, - 5 - ], - [ - 6, - 6 - ], - [ - 8, - 8 - ], - [ - 9, - 9 - ] - ] - }, - "FieldOptions": { - "fields": { - "ctype": { - "type": "CType", - "id": 1, - "options": { - "default": "STRING" - } - }, - "packed": { - "type": "bool", - "id": 2 - }, - "jstype": { - "type": "JSType", - "id": 6, - "options": { - "default": "JS_NORMAL" - } - }, - "lazy": { - "type": "bool", - "id": 5, - "options": { - "default": false - } - }, - "unverifiedLazy": { - "type": "bool", - "id": 15, - "options": { - "default": false - } - }, - "deprecated": { - "type": "bool", - "id": 3, - "options": { - "default": false - } - }, - "weak": { - "type": "bool", - "id": 10, - 
"options": { - "default": false - } - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ], - "reserved": [ - [ - 4, - 4 - ] - ], - "nested": { - "CType": { - "values": { - "STRING": 0, - "CORD": 1, - "STRING_PIECE": 2 - } - }, - "JSType": { - "values": { - "JS_NORMAL": 0, - "JS_STRING": 1, - "JS_NUMBER": 2 - } - } - } - }, - "OneofOptions": { - "fields": { - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ] - }, - "EnumOptions": { - "fields": { - "allowAlias": { - "type": "bool", - "id": 2 - }, - "deprecated": { - "type": "bool", - "id": 3, - "options": { - "default": false - } - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ], - "reserved": [ - [ - 5, - 5 - ] - ] - }, - "EnumValueOptions": { - "fields": { - "deprecated": { - "type": "bool", - "id": 1, - "options": { - "default": false - } - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ] - }, - "ServiceOptions": { - "fields": { - "deprecated": { - "type": "bool", - "id": 33, - "options": { - "default": false - } - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ] - }, - "MethodOptions": { - "fields": { - "deprecated": { - "type": "bool", - "id": 33, - "options": { - "default": false - } - }, - "idempotencyLevel": { - "type": "IdempotencyLevel", - "id": 34, - "options": { - "default": "IDEMPOTENCY_UNKNOWN" - } - }, - "uninterpretedOption": { - "rule": "repeated", - "type": "UninterpretedOption", - "id": 999 - } - }, - "extensions": [ - [ - 1000, - 536870911 - ] - ], - "nested": { - 
"IdempotencyLevel": { - "values": { - "IDEMPOTENCY_UNKNOWN": 0, - "NO_SIDE_EFFECTS": 1, - "IDEMPOTENT": 2 - } - } - } - }, - "UninterpretedOption": { - "fields": { - "name": { - "rule": "repeated", - "type": "NamePart", - "id": 2 - }, - "identifierValue": { - "type": "string", - "id": 3 - }, - "positiveIntValue": { - "type": "uint64", - "id": 4 - }, - "negativeIntValue": { - "type": "int64", - "id": 5 - }, - "doubleValue": { - "type": "double", - "id": 6 - }, - "stringValue": { - "type": "bytes", - "id": 7 - }, - "aggregateValue": { - "type": "string", - "id": 8 - } - }, - "nested": { - "NamePart": { - "fields": { - "namePart": { - "rule": "required", - "type": "string", - "id": 1 - }, - "isExtension": { - "rule": "required", - "type": "bool", - "id": 2 - } - } - } - } - }, - "SourceCodeInfo": { - "fields": { - "location": { - "rule": "repeated", - "type": "Location", - "id": 1 - } - }, - "nested": { - "Location": { - "fields": { - "path": { - "rule": "repeated", - "type": "int32", - "id": 1 - }, - "span": { - "rule": "repeated", - "type": "int32", - "id": 2 - }, - "leadingComments": { - "type": "string", - "id": 3 - }, - "trailingComments": { - "type": "string", - "id": 4 - }, - "leadingDetachedComments": { - "rule": "repeated", - "type": "string", - "id": 6 - } - } - } - } - }, - "GeneratedCodeInfo": { - "fields": { - "annotation": { - "rule": "repeated", - "type": "Annotation", - "id": 1 - } - }, - "nested": { - "Annotation": { - "fields": { - "path": { - "rule": "repeated", - "type": "int32", - "id": 1 - }, - "sourceFile": { - "type": "string", - "id": 2 - }, - "begin": { - "type": "int32", - "id": 3 - }, - "end": { - "type": "int32", - "id": 4 - }, - "semantic": { - "type": "Semantic", - "id": 5 - } - }, - "nested": { - "Semantic": { - "values": { - "NONE": 0, - "SET": 1, - "ALIAS": 2 - } - } - } - } - } - }, - "Timestamp": { - "fields": { - "seconds": { - "type": "int64", - "id": 1 - }, - "nanos": { - "type": "int32", - "id": 2 - } - } - }, - "DoubleValue": { 
- "fields": { - "value": { - "type": "double", - "id": 1 - } - } - }, - "FloatValue": { - "fields": { - "value": { - "type": "float", - "id": 1 - } - } - }, - "Int64Value": { - "fields": { - "value": { - "type": "int64", - "id": 1 - } - } - }, - "UInt64Value": { - "fields": { - "value": { - "type": "uint64", - "id": 1 - } - } - }, - "Int32Value": { - "fields": { - "value": { - "type": "int32", - "id": 1 - } - } - }, - "UInt32Value": { - "fields": { - "value": { - "type": "uint32", - "id": 1 - } - } - }, - "BoolValue": { - "fields": { - "value": { - "type": "bool", - "id": 1 - } - } - }, - "StringValue": { - "fields": { - "value": { - "type": "string", - "id": 1 - } - } - }, - "BytesValue": { - "fields": { - "value": { - "type": "bytes", - "id": 1 - } - } - }, - "Any": { - "fields": { - "type_url": { - "type": "string", - "id": 1 - }, - "value": { - "type": "bytes", - "id": 2 - } - } - }, - "Empty": { - "fields": {} - } - } - }, - "api": { - "options": { - "go_package": "google.golang.org/genproto/googleapis/api/annotations;annotations", - "java_multiple_files": true, - "java_outer_classname": "ResourceProto", - "java_package": "com.google.api", - "objc_class_prefix": "GAPI", - "cc_enable_arenas": true - }, - "nested": { - "http": { - "type": "HttpRule", - "id": 72295728, - "extend": "google.protobuf.MethodOptions" - }, - "Http": { - "fields": { - "rules": { - "rule": "repeated", - "type": "HttpRule", - "id": 1 - }, - "fullyDecodeReservedExpansion": { - "type": "bool", - "id": 2 - } - } - }, - "HttpRule": { - "oneofs": { - "pattern": { - "oneof": [ - "get", - "put", - "post", - "delete", - "patch", - "custom" - ] - } - }, - "fields": { - "selector": { - "type": "string", - "id": 1 - }, - "get": { - "type": "string", - "id": 2 - }, - "put": { - "type": "string", - "id": 3 - }, - "post": { - "type": "string", - "id": 4 - }, - "delete": { - "type": "string", - "id": 5 - }, - "patch": { - "type": "string", - "id": 6 - }, - "custom": { - "type": "CustomHttpPattern", - 
"id": 8 - }, - "body": { - "type": "string", - "id": 7 - }, - "responseBody": { - "type": "string", - "id": 12 - }, - "additionalBindings": { - "rule": "repeated", - "type": "HttpRule", - "id": 11 - } - } - }, - "CustomHttpPattern": { - "fields": { - "kind": { - "type": "string", - "id": 1 - }, - "path": { - "type": "string", - "id": 2 - } - } - }, - "methodSignature": { - "rule": "repeated", - "type": "string", - "id": 1051, - "extend": "google.protobuf.MethodOptions" - }, - "defaultHost": { - "type": "string", - "id": 1049, - "extend": "google.protobuf.ServiceOptions" - }, - "oauthScopes": { - "type": "string", - "id": 1050, - "extend": "google.protobuf.ServiceOptions" - }, - "fieldBehavior": { - "rule": "repeated", - "type": "google.api.FieldBehavior", - "id": 1052, - "extend": "google.protobuf.FieldOptions" - }, - "FieldBehavior": { - "values": { - "FIELD_BEHAVIOR_UNSPECIFIED": 0, - "OPTIONAL": 1, - "REQUIRED": 2, - "OUTPUT_ONLY": 3, - "INPUT_ONLY": 4, - "IMMUTABLE": 5, - "UNORDERED_LIST": 6, - "NON_EMPTY_DEFAULT": 7 - } - }, - "resourceReference": { - "type": "google.api.ResourceReference", - "id": 1055, - "extend": "google.protobuf.FieldOptions" - }, - "resourceDefinition": { - "rule": "repeated", - "type": "google.api.ResourceDescriptor", - "id": 1053, - "extend": "google.protobuf.FileOptions" - }, - "resource": { - "type": "google.api.ResourceDescriptor", - "id": 1053, - "extend": "google.protobuf.MessageOptions" - }, - "ResourceDescriptor": { - "fields": { - "type": { - "type": "string", - "id": 1 - }, - "pattern": { - "rule": "repeated", - "type": "string", - "id": 2 - }, - "nameField": { - "type": "string", - "id": 3 - }, - "history": { - "type": "History", - "id": 4 - }, - "plural": { - "type": "string", - "id": 5 - }, - "singular": { - "type": "string", - "id": 6 - }, - "style": { - "rule": "repeated", - "type": "Style", - "id": 10 - } - }, - "nested": { - "History": { - "values": { - "HISTORY_UNSPECIFIED": 0, - "ORIGINALLY_SINGLE_PATTERN": 1, - 
"FUTURE_MULTI_PATTERN": 2 - } - }, - "Style": { - "values": { - "STYLE_UNSPECIFIED": 0, - "DECLARATIVE_FRIENDLY": 1 - } - } - } - }, - "ResourceReference": { - "fields": { - "type": { - "type": "string", - "id": 1 - }, - "childType": { - "type": "string", - "id": 2 - } - } - } - } - }, - "rpc": { - "options": { - "cc_enable_arenas": true, - "go_package": "google.golang.org/genproto/googleapis/rpc/status;status", - "java_multiple_files": true, - "java_outer_classname": "StatusProto", - "java_package": "com.google.rpc", - "objc_class_prefix": "RPC" - }, - "nested": { - "Status": { - "fields": { - "code": { - "type": "int32", - "id": 1 - }, - "message": { - "type": "string", - "id": 2 - }, - "details": { - "rule": "repeated", - "type": "google.protobuf.Any", - "id": 3 - } - } - } - } - } - } - } - } -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts deleted file mode 100644 index 8d6b763438d..00000000000 --- a/handwritten/bigquery-storage/src/index.ts +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as v1 from './v1'; -import * as v1beta1 from './v1beta1'; -const BigQueryReadClient = v1.BigQueryReadClient; -type BigQueryReadClient = v1.BigQueryReadClient; -const BigQueryWriteClient = v1.BigQueryWriteClient; -type BigQueryWriteClient = v1.BigQueryWriteClient; -const BigQueryStorageClient = v1beta1.BigQueryStorageClient; -type BigQueryStorageClient = v1beta1.BigQueryStorageClient; -export { - v1, - BigQueryReadClient, - v1beta1, - BigQueryStorageClient, - BigQueryWriteClient, -}; -// For compatibility with JavaScript libraries we need to provide this default export: -// tslint:disable-next-line no-default-export -export default {v1, BigQueryReadClient, BigQueryWriteClient}; -import * as protos from '../protos/protos'; -export {protos}; diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts deleted file mode 100644 index 1a78d38a741..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ /dev/null @@ -1,938 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -/* global window */ -import type * as gax from 'google-gax'; -import type { - Callback, - CallOptions, - Descriptors, - ClientOptions, -} from 'google-gax'; -import {PassThrough} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/big_query_read_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './big_query_read_client_config.json'; -const version = require('../../../package.json').version; - -/** - * BigQuery Read API. - * - * The Read API can be used to read data from BigQuery. - * @class - * @memberof v1 - */ -export class BigQueryReadClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - bigQueryReadStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of BigQueryReadClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. 
- * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. 
Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryReadClient({fallback: 'rest'}, gax); - * ``` - */ - constructor( - opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback - ) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof BigQueryReadClient; - const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!( - opts?.servicePath || opts?.apiEndpoint - ); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = - opts?.fallback ?? - (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = this._gaxGrpc.auth as gax.GoogleAuth; - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. 
- this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' - ), - readStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' - ), - tablePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}' - ), - writeStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' - ), - }; - - // Some of the methods on this service provide streaming responses. - // Provide descriptors for these. 
- this.descriptors.stream = { - readRows: new this._gaxModule.StreamDescriptor( - this._gaxModule.StreamType.SERVER_STREAMING, - opts.fallback === 'rest' - ), - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.storage.v1.BigQueryRead', - gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} - ); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.bigQueryReadStub) { - return this.bigQueryReadStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.storage.v1.BigQueryRead. - this.bigQueryReadStub = this._gaxGrpc.createStub( - this._opts.fallback - ? 
(this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1.BigQueryRead' - ) - : // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, - this._opts, - this._providedCustomServicePath - ) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const bigQueryReadStubMethods = [ - 'createReadSession', - 'readRows', - 'splitReadStream', - ]; - for (const methodName of bigQueryReadStubMethods) { - const callPromise = this.bigQueryReadStub.then( - stub => - (...args: Array<{}>) => { - if (this._terminated) { - if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); - setImmediate(() => { - stream.emit( - 'error', - new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) - ); - }); - return stream; - } - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error | null | undefined) => () => { - throw err; - } - ); - - const descriptor = this.descriptors.stream[methodName] || undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.bigQueryReadStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. 
- */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId( - callback?: Callback - ): Promise | void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- - /** - * Creates a new read session. A read session divides the contents of a - * BigQuery table into one or more streams, which can then be used to read - * data from the table. The read session also specifies properties of the - * data to be read, such as a list of columns or a push-down filter describing - * the rows to be returned. - * - * A particular row can be read by at most one stream. When the caller has - * reached the end of each stream in the session, then all the data in the - * table has been read. - * - * Data is assigned to each stream such that roughly the same number of - * rows can be read from each stream. Because the server-side unit for - * assigning data is collections of rows, the API does not guarantee that - * each stream will return the same number or rows. Additionally, the - * limits are enforced based on the number of pre-filtered rows, so some - * filters can lead to lopsided assignments. - * - * Read sessions automatically expire 6 hours after they are created and do - * not require manual clean-up by the caller. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The request project that owns the session, in the form of - * `projects/{project_id}`. - * @param {google.cloud.bigquery.storage.v1.ReadSession} request.readSession - * Required. Session to be created. - * @param {number} request.maxStreamCount - * Max initial number of streams. If unset or zero, the server will - * provide a value of streams so as to produce reasonable throughput. Must be - * non-negative. The number of streams may be lower than the requested number, - * depending on the amount parallelism that is reasonable for the table. - * There is a default system max limit of 1,000. - * - * This must be greater than or equal to preferred_min_stream_count. - * Typically, clients should either leave this unset to let the system to - * determine an upper bound OR set this a size for the maximum "units of work" - * it can gracefully handle. - * @param {number} request.preferredMinStreamCount - * The minimum preferred stream count. This parameter can be used to inform - * the service that there is a desired lower bound on the number of streams. - * This is typically a target parallelism of the client (e.g. a Spark - * cluster with N-workers would set this to a low multiple of N to ensure - * good cluster utilization). - * - * The system will make a best effort to provide at least this number of - * streams, but in some cases might provide less. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1.ReadSession}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/big_query_read.create_read_session.js - * region_tag:bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async - */ - createReadSession( - request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - >; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IReadSession, - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - 
callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'read_session.table': request.readSession!.table ?? '', - }); - this.initialize(); - return this.innerApiCalls.createReadSession(request, options, callback); - } - /** - * Splits a given `ReadStream` into two `ReadStream` objects. These - * `ReadStream` objects are referred to as the primary and the residual - * streams of the split. The original `ReadStream` can still be read from in - * the same manner as before. Both of the returned `ReadStream` objects can - * also be read from, and the rows returned by both child streams will be - * the same as the rows read from the original stream. - * - * Moreover, the two child streams will be allocated back-to-back in the - * original `ReadStream`. Concretely, it is guaranteed that for streams - * original, primary, and residual, that original[0-j] = primary[0-j] and - * original[j-n] = residual[0-m] once the streams have been read to - * completion. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. Name of the stream to split. - * @param {number} request.fraction - * A value in the range (0.0, 1.0) that specifies the fractional point at - * which the original stream should be split. The actual split point is - * evaluated on pre-filtered rows, so if a filter is provided, then there is - * no guarantee that the division of the rows between the new child streams - * will be proportional to this fractional value. Additionally, because the - * server-side unit for assigning data is collections of rows, this fraction - * will always map to a data storage boundary on the server side. 
- * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1/big_query_read.split_read_stream.js - * region_tag:bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async - */ - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - >; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - 
{} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - name: request.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.splitReadStream(request, options, callback); - } - - /** - * Reads rows from the stream in the format prescribed by the ReadSession. - * Each response contains one or more table rows, up to a maximum of 100 MiB - * per response; read requests which attempt to read individual rows larger - * than 100 MiB will fail. - * - * Each request also returns a set of stream statistics reflecting the current - * state of the stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.readStream - * Required. Stream to read rows from. - * @param {number} request.offset - * The offset requested must be less than the last row read from Read. - * Requesting a larger offset is undefined. If not specified, start reading - * from offset zero. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Stream} - * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) - * for more details and examples. - * @example include:samples/generated/v1/big_query_read.read_rows.js - * region_tag:bigquerystorage_v1_generated_BigQueryRead_ReadRows_async - */ - readRows( - request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, - options?: CallOptions - ): gax.CancellableStream { - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - read_stream: request.readStream ?? '', - }); - this.initialize(); - return this.innerApiCalls.readRows(request, options); - } - - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project: string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified readSession resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} session - * @returns {string} Resource name string. 
- */ - readSessionPath(project: string, location: string, session: string) { - return this.pathTemplates.readSessionPathTemplate.render({ - project: project, - location: location, - session: session, - }); - } - - /** - * Parse the project from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the project. - */ - matchProjectFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .project; - } - - /** - * Parse the location from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the location. - */ - matchLocationFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .location; - } - - /** - * Parse the session from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the session. - */ - matchSessionFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .session; - } - - /** - * Return a fully-qualified readStream resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} session - * @param {string} stream - * @returns {string} Resource name string. - */ - readStreamPath( - project: string, - location: string, - session: string, - stream: string - ) { - return this.pathTemplates.readStreamPathTemplate.render({ - project: project, - location: location, - session: session, - stream: stream, - }); - } - - /** - * Parse the project from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. 
- * @returns {string} A string representing the project. - */ - matchProjectFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .project; - } - - /** - * Parse the location from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the location. - */ - matchLocationFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .location; - } - - /** - * Parse the session from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the session. - */ - matchSessionFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .session; - } - - /** - * Parse the stream from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the stream. - */ - matchStreamFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .stream; - } - - /** - * Return a fully-qualified table resource name string. - * - * @param {string} project - * @param {string} dataset - * @param {string} table - * @returns {string} Resource name string. - */ - tablePath(project: string, dataset: string, table: string) { - return this.pathTemplates.tablePathTemplate.render({ - project: project, - dataset: dataset, - table: table, - }); - } - - /** - * Parse the project from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).project; - } - - /** - * Parse the dataset from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the dataset. - */ - matchDatasetFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).dataset; - } - - /** - * Parse the table from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the table. - */ - matchTableFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).table; - } - - /** - * Return a fully-qualified writeStream resource name string. - * - * @param {string} project - * @param {string} dataset - * @param {string} table - * @param {string} stream - * @returns {string} Resource name string. - */ - writeStreamPath( - project: string, - dataset: string, - table: string, - stream: string - ) { - return this.pathTemplates.writeStreamPathTemplate.render({ - project: project, - dataset: dataset, - table: table, - stream: stream, - }); - } - - /** - * Parse the project from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the project. - */ - matchProjectFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .project; - } - - /** - * Parse the dataset from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the dataset. 
- */ - matchDatasetFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .dataset; - } - - /** - * Parse the table from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the table. - */ - matchTableFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .table; - } - - /** - * Parse the stream from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the stream. - */ - matchStreamFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .stream; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.bigQueryReadStub && !this._terminated) { - return this.bigQueryReadStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json deleted file mode 100644 index 42b2735b9fe..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.storage.v1.BigQueryRead": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateReadSession": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ReadRows": { - "timeout_millis": 86400000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "SplitReadStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json deleted file mode 100644 index d730716117c..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", - "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", - "../../protos/google/cloud/bigquery/storage/v1/avro.proto", - 
"../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", - "../../protos/google/cloud/bigquery/storage/v1/storage.proto", - "../../protos/google/cloud/bigquery/storage/v1/stream.proto", - "../../protos/google/cloud/bigquery/storage/v1/table.proto" -] diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts deleted file mode 100644 index 6e002a67e7c..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ /dev/null @@ -1,1213 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type { - Callback, - CallOptions, - Descriptors, - ClientOptions, -} from 'google-gax'; -import {PassThrough} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/big_query_write_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. 
- */ -import * as gapicConfig from './big_query_write_client_config.json'; -const version = require('../../../package.json').version; - -/** - * BigQuery Write API. - * - * The Write API can be used to write data to BigQuery. - * - * For supplementary information about the Write API, see: - * https://cloud.google.com/bigquery/docs/write-api - * @class - * @memberof v1 - */ -export class BigQueryWriteClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - bigQueryWriteStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of BigQueryWriteClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. 
- * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryWriteClient({fallback: 'rest'}, gax); - * ``` - */ - constructor( - opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback - ) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof BigQueryWriteClient; - const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!( - opts?.servicePath || opts?.apiEndpoint - ); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? 
{}; - const fallback = - opts?.fallback ?? - (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = this._gaxGrpc.auth as gax.GoogleAuth; - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. 
- const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' - ), - readStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' - ), - tablePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}' - ), - writeStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' - ), - }; - - // Some of the methods on this service provide streaming responses. - // Provide descriptors for these. - this.descriptors.stream = { - appendRows: new this._gaxModule.StreamDescriptor( - this._gaxModule.StreamType.BIDI_STREAMING, - opts.fallback === 'rest' - ), - }; - - // Put together the default options sent with requests. 
- this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.storage.v1.BigQueryWrite', - gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} - ); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.bigQueryWriteStub) { - return this.bigQueryWriteStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.storage.v1.BigQueryWrite. - this.bigQueryWriteStub = this._gaxGrpc.createStub( - this._opts.fallback - ? (this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1.BigQueryWrite' - ) - : // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryWrite, - this._opts, - this._providedCustomServicePath - ) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. 
- const bigQueryWriteStubMethods = [ - 'createWriteStream', - 'appendRows', - 'getWriteStream', - 'finalizeWriteStream', - 'batchCommitWriteStreams', - 'flushRows', - ]; - for (const methodName of bigQueryWriteStubMethods) { - const callPromise = this.bigQueryWriteStub.then( - stub => - (...args: Array<{}>) => { - if (this._terminated) { - if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); - setImmediate(() => { - stream.emit( - 'error', - new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) - ); - }); - return stream; - } - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error | null | undefined) => () => { - throw err; - } - ); - - const descriptor = this.descriptors.stream[methodName] || undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.bigQueryWriteStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. 
- */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/bigquery.insertdata', - 'https://www.googleapis.com/auth/cloud-platform', - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId( - callback?: Callback - ): Promise | void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- - /** - * Creates a write stream to the given table. - * Additionally, every table has a special stream named '_default' - * to which data can be written. This stream doesn't need to be created using - * CreateWriteStream. It is a stream that can be used simultaneously by any - * number of clients. Data written to this stream is considered committed as - * soon as an acknowledgement is received. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Reference to the table to which the stream belongs, in the format - * of `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param {google.cloud.bigquery.storage.v1.WriteStream} request.writeStream - * Required. Stream to be created. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/big_query_write.create_write_stream.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async - */ - createWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IWriteStream, - ( - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | undefined - ), - {} | undefined - ] - >; - createWriteStream( - request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createWriteStream( - request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IWriteStream, - ( - | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - 
callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - parent: request.parent ?? '', - }); - this.initialize(); - return this.innerApiCalls.createWriteStream(request, options, callback); - } - /** - * Gets information about a write stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. Name of the stream to get, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - * @param {google.cloud.bigquery.storage.v1.WriteStreamView} request.view - * Indicates whether to get full or partial view of the WriteStream. If - * not set, view returned will be basic. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/big_query_write.get_write_stream.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async - */ - getWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IWriteStream, - ( - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | undefined - ), - {} | undefined - ] - >; - getWriteStream( - request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - getWriteStream( - request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - getWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IWriteStream, - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IWriteStream, - ( - | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } 
else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - name: request.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.getWriteStream(request, options, callback); - } - /** - * Finalize a write stream so that no new data can be appended to the - * stream. Finalize is not supported on the '_default' stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. Name of the stream to finalize, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/big_query_write.finalize_write_stream.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async - */ - finalizeWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | undefined - ), - {} | undefined - ] - >; - finalizeWriteStream( - request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - finalizeWriteStream( - request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - finalizeWriteStream( - request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request 
= request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - name: request.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.finalizeWriteStream(request, options, callback); - } - /** - * Atomically commits a group of `PENDING` streams that belong to the same - * `parent` table. - * - * Streams must be finalized before commit and cannot be committed multiple - * times. Once a stream is committed, data in the stream becomes available - * for read operations. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Parent table that all the streams should belong to, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param {string[]} request.writeStreams - * Required. The group of streams that will be committed atomically. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/big_query_write.batch_commit_write_streams.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async - */ - batchCommitWriteStreams( - request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - ( - | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | undefined - ), - {} | undefined - ] - >; - batchCommitWriteStreams( - request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - batchCommitWriteStreams( - request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - batchCommitWriteStreams( - request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, - ( - | 
protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - parent: request.parent ?? '', - }); - this.initialize(); - return this.innerApiCalls.batchCommitWriteStreams( - request, - options, - callback - ); - } - /** - * Flushes rows to a BUFFERED stream. - * - * If users are appending rows to BUFFERED stream, flush operation is - * required in order for the rows to become available for reading. A - * Flush operation flushes up to any previously flushed offset in a BUFFERED - * stream, to the offset specified in the request. - * - * Flush is not supported on the _default stream, since it is not BUFFERED. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.writeStream - * Required. The stream that is the target of the flush operation. - * @param {google.protobuf.Int64Value} request.offset - * Ending offset of the flush operation. Rows before this offset(including - * this offset) will be flushed. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1/big_query_write.flush_rows.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async - */ - flushRows( - request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, - {} | undefined - ] - >; - flushRows( - request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - flushRows( - request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - flushRows( - request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, - protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 
'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - write_stream: request.writeStream ?? '', - }); - this.initialize(); - return this.innerApiCalls.flushRows(request, options, callback); - } - - /** - * Appends data to the given stream. - * - * If `offset` is specified, the `offset` is checked against the end of - * stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an - * attempt is made to append to an offset beyond the current end of the stream - * or `ALREADY_EXISTS` if user provides an `offset` that has already been - * written to. User can retry with adjusted offset within the same RPC - * connection. If `offset` is not specified, append happens at the end of the - * stream. - * - * The response contains an optional offset at which the append - * happened. No offset information will be returned for appends to a - * default stream. - * - * Responses are received in the same order in which requests are sent. - * There will be one response for each successful inserted request. Responses - * may optionally embed error information if the originating AppendRequest was - * not successfully processed. - * - * The specifics of when successfully appended data is made visible to the - * table are governed by the type of stream: - * - * * For COMMITTED streams (which includes the default stream), data is - * visible immediately upon successful append. - * - * * For BUFFERED streams, data is made visible via a subsequent `FlushRows` - * rpc which advances a cursor to a newer offset in the stream. 
- * - * * For PENDING streams, data is not made visible until the stream itself is - * finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly - * committed via the `BatchCommitWriteStreams` rpc. - * - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which is both readable and writable. It accepts objects - * representing [AppendRowsRequest]{@link google.cloud.bigquery.storage.v1.AppendRowsRequest} for write() method, and - * will emit objects representing [AppendRowsResponse]{@link google.cloud.bigquery.storage.v1.AppendRowsResponse} on 'data' event asynchronously. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) - * for more details and examples. - * @example include:samples/generated/v1/big_query_write.append_rows.js - * region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async - */ - appendRows(options?: CallOptions): gax.CancellableStream { - this.initialize(); - return this.innerApiCalls.appendRows(null, options); - } - - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project: string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified readSession resource name string. 
- * - * @param {string} project - * @param {string} location - * @param {string} session - * @returns {string} Resource name string. - */ - readSessionPath(project: string, location: string, session: string) { - return this.pathTemplates.readSessionPathTemplate.render({ - project: project, - location: location, - session: session, - }); - } - - /** - * Parse the project from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the project. - */ - matchProjectFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .project; - } - - /** - * Parse the location from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the location. - */ - matchLocationFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .location; - } - - /** - * Parse the session from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the session. - */ - matchSessionFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .session; - } - - /** - * Return a fully-qualified readStream resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} session - * @param {string} stream - * @returns {string} Resource name string. 
- */ - readStreamPath( - project: string, - location: string, - session: string, - stream: string - ) { - return this.pathTemplates.readStreamPathTemplate.render({ - project: project, - location: location, - session: session, - stream: stream, - }); - } - - /** - * Parse the project from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the project. - */ - matchProjectFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .project; - } - - /** - * Parse the location from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the location. - */ - matchLocationFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .location; - } - - /** - * Parse the session from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the session. - */ - matchSessionFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .session; - } - - /** - * Parse the stream from ReadStream resource. - * - * @param {string} readStreamName - * A fully-qualified path representing ReadStream resource. - * @returns {string} A string representing the stream. - */ - matchStreamFromReadStreamName(readStreamName: string) { - return this.pathTemplates.readStreamPathTemplate.match(readStreamName) - .stream; - } - - /** - * Return a fully-qualified table resource name string. - * - * @param {string} project - * @param {string} dataset - * @param {string} table - * @returns {string} Resource name string. 
- */ - tablePath(project: string, dataset: string, table: string) { - return this.pathTemplates.tablePathTemplate.render({ - project: project, - dataset: dataset, - table: table, - }); - } - - /** - * Parse the project from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the project. - */ - matchProjectFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).project; - } - - /** - * Parse the dataset from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the dataset. - */ - matchDatasetFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).dataset; - } - - /** - * Parse the table from Table resource. - * - * @param {string} tableName - * A fully-qualified path representing Table resource. - * @returns {string} A string representing the table. - */ - matchTableFromTableName(tableName: string) { - return this.pathTemplates.tablePathTemplate.match(tableName).table; - } - - /** - * Return a fully-qualified writeStream resource name string. - * - * @param {string} project - * @param {string} dataset - * @param {string} table - * @param {string} stream - * @returns {string} Resource name string. - */ - writeStreamPath( - project: string, - dataset: string, - table: string, - stream: string - ) { - return this.pathTemplates.writeStreamPathTemplate.render({ - project: project, - dataset: dataset, - table: table, - stream: stream, - }); - } - - /** - * Parse the project from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .project; - } - - /** - * Parse the dataset from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the dataset. - */ - matchDatasetFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .dataset; - } - - /** - * Parse the table from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the table. - */ - matchTableFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .table; - } - - /** - * Parse the stream from WriteStream resource. - * - * @param {string} writeStreamName - * A fully-qualified path representing WriteStream resource. - * @returns {string} A string representing the stream. - */ - matchStreamFromWriteStreamName(writeStreamName: string) { - return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) - .stream; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.bigQueryWriteStub && !this._terminated) { - return this.bigQueryWriteStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json deleted file mode 100644 index 4b7f4b0657b..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.storage.v1.BigQueryWrite": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "deadline_exceeded_resource_exhausted_unavailable": [ - "DEADLINE_EXCEEDED", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ec82364a95d03873ac5f61710bb6b9b42e40f31d": { - "initial_retry_delay_millis": 10000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 120000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateWriteStream": { - "timeout_millis": 1200000, - "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", - "retry_params_name": "ec82364a95d03873ac5f61710bb6b9b42e40f31d" - }, - "AppendRows": { - "timeout_millis": 86400000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "GetWriteStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "FinalizeWriteStream": { - "timeout_millis": 600000, - 
"retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "BatchCommitWriteStreams": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "FlushRows": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json deleted file mode 100644 index d730716117c..00000000000 --- a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", - "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", - "../../protos/google/cloud/bigquery/storage/v1/avro.proto", - "../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", - "../../protos/google/cloud/bigquery/storage/v1/storage.proto", - "../../protos/google/cloud/bigquery/storage/v1/stream.proto", - "../../protos/google/cloud/bigquery/storage/v1/table.proto" -] diff --git a/handwritten/bigquery-storage/src/v1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1/gapic_metadata.json deleted file mode 100644 index f52c2dae123..00000000000 --- a/handwritten/bigquery-storage/src/v1/gapic_metadata.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.storage.v1", - "libraryPackage": "@google-cloud/bigquery-storage", - "services": { - "BigQueryRead": { - "clients": { - "grpc": { - "libraryClient": "BigQueryReadClient", - "rpcs": { - "CreateReadSession": { - "methods": [ - "createReadSession" - ] - }, - "SplitReadStream": { - "methods": [ - "splitReadStream" - ] - }, - "ReadRows": { - "methods": [ - "readRows" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": 
"BigQueryReadClient", - "rpcs": { - "CreateReadSession": { - "methods": [ - "createReadSession" - ] - }, - "SplitReadStream": { - "methods": [ - "splitReadStream" - ] - } - } - } - } - }, - "BigQueryWrite": { - "clients": { - "grpc": { - "libraryClient": "BigQueryWriteClient", - "rpcs": { - "CreateWriteStream": { - "methods": [ - "createWriteStream" - ] - }, - "GetWriteStream": { - "methods": [ - "getWriteStream" - ] - }, - "FinalizeWriteStream": { - "methods": [ - "finalizeWriteStream" - ] - }, - "BatchCommitWriteStreams": { - "methods": [ - "batchCommitWriteStreams" - ] - }, - "FlushRows": { - "methods": [ - "flushRows" - ] - }, - "AppendRows": { - "methods": [ - "appendRows" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "BigQueryWriteClient", - "rpcs": { - "CreateWriteStream": { - "methods": [ - "createWriteStream" - ] - }, - "GetWriteStream": { - "methods": [ - "getWriteStream" - ] - }, - "FinalizeWriteStream": { - "methods": [ - "finalizeWriteStream" - ] - }, - "BatchCommitWriteStreams": { - "methods": [ - "batchCommitWriteStreams" - ] - }, - "FlushRows": { - "methods": [ - "flushRows" - ] - } - } - } - } - } - } -} diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts deleted file mode 100644 index f3bacd94214..00000000000 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {BigQueryReadClient} from './big_query_read_client'; -export {BigQueryWriteClient} from './big_query_write_client'; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts deleted file mode 100644 index 19120696d61..00000000000 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ /dev/null @@ -1,1004 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type { - Callback, - CallOptions, - Descriptors, - ClientOptions, -} from 'google-gax'; -import {PassThrough} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1beta1/big_query_storage_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. 
- */ -import * as gapicConfig from './big_query_storage_client_config.json'; -const version = require('../../../package.json').version; - -/** - * BigQuery storage API. - * - * The BigQuery storage API can be used to read data stored in BigQuery. - * @class - * @memberof v1beta1 - */ -export class BigQueryStorageClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - bigQueryStorageStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of BigQueryStorageClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. 
- * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryStorageClient({fallback: 'rest'}, gax); - * ``` - */ - constructor( - opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback - ) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof BigQueryStorageClient; - const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!( - opts?.servicePath || opts?.apiEndpoint - ); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = - opts?.fallback ?? 
- (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = this._gaxGrpc.auth as gax.GoogleAuth; - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. 
- this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' - ), - streamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/streams/{stream}' - ), - }; - - // Some of the methods on this service provide streaming responses. - // Provide descriptors for these. - this.descriptors.stream = { - readRows: new this._gaxModule.StreamDescriptor( - this._gaxModule.StreamType.SERVER_STREAMING, - opts.fallback === 'rest' - ), - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', - gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} - ); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. 
- * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.bigQueryStorageStub) { - return this.bigQueryStorageStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.storage.v1beta1.BigQueryStorage. - this.bigQueryStorageStub = this._gaxGrpc.createStub( - this._opts.fallback - ? (this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' - ) - : // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.storage.v1beta1 - .BigQueryStorage, - this._opts, - this._providedCustomServicePath - ) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const bigQueryStorageStubMethods = [ - 'createReadSession', - 'readRows', - 'batchCreateReadSessionStreams', - 'finalizeStream', - 'splitReadStream', - ]; - for (const methodName of bigQueryStorageStubMethods) { - const callPromise = this.bigQueryStorageStub.then( - stub => - (...args: Array<{}>) => { - if (this._terminated) { - if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); - setImmediate(() => { - stream.emit( - 'error', - new this._gaxModule.GoogleError( - 'The client has already been closed.' 
- ) - ); - }); - return stream; - } - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error | null | undefined) => () => { - throw err; - } - ); - - const descriptor = this.descriptors.stream[methodName] || undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.bigQueryStorageStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerystorage.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId( - callback?: Callback - ): Promise | void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- - /** - * Creates a new read session. 
A read session divides the contents of a - * BigQuery table into one or more streams, which can then be used to read - * data from the table. The read session also specifies properties of the - * data to be read, such as a list of columns or a push-down filter describing - * the rows to be returned. - * - * A particular row can be read by at most one stream. When the caller has - * reached the end of each stream in the session, then all the data in the - * table has been read. - * - * Read sessions automatically expire 24 hours after they are created and do - * not require manual clean-up by the caller. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.TableReference} request.tableReference - * Required. Reference to the table to read. - * @param {string} request.parent - * Required. String of the form `projects/{project_id}` indicating the - * project this ReadSession is associated with. This is the project that will - * be billed for usage. - * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} request.tableModifiers - * Any modifiers to the Table (e.g. snapshot timestamp). - * @param {number} request.requestedStreams - * Initial number of streams. If unset or 0, we will - * provide a value of streams so as to produce reasonable throughput. Must be - * non-negative. The number of streams may be lower than the requested number, - * depending on the amount parallelism that is reasonable for the table and - * the maximum amount of parallelism allowed by the system. - * - * Streams must be read starting from offset 0. - * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} request.readOptions - * Read options for this session (e.g. column selection, filters). - * @param {google.cloud.bigquery.storage.v1beta1.DataFormat} request.format - * Data output format. Currently default to Avro. 
- * @param {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} request.shardingStrategy - * The strategy to use for distributing data among multiple streams. Currently - * defaults to liquid sharding. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1beta1.ReadSession}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1beta1/big_query_storage.create_read_session.js - * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async - */ - createReadSession( - request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - >; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): void; - createReadSession( - request?: 
protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.IReadSession, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'table_reference.project_id': request.tableReference!.projectId ?? '', - 'table_reference.dataset_id': request.tableReference!.datasetId ?? '', - }); - this.initialize(); - return this.innerApiCalls.createReadSession(request, options, callback); - } - /** - * Creates additional streams for a ReadSession. This API can be used to - * dynamically adjust the parallelism of a batch processing task upwards by - * adding additional workers. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} request.session - * Required. Must be a non-expired session obtained from a call to - * CreateReadSession. Only the name field needs to be set. - * @param {number} request.requestedStreams - * Required. 
Number of new streams requested. Must be positive. - * Number of added streams may be less than this, see CreateReadSessionRequest - * for more information. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js - * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async - */ - batchCreateReadSessionStreams( - request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - ( - | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | undefined - ), - {} | undefined - ] - >; - batchCreateReadSessionStreams( - request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - batchCreateReadSessionStreams( - request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | 
protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): void; - batchCreateReadSessionStreams( - request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, - ( - | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'session.name': request.session!.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.batchCreateReadSessionStreams( - request, - options, - callback - ); - } - /** - * Triggers the graceful termination of a single stream in a ReadSession. This - * API can be used to dynamically adjust the parallelism of a batch processing - * task downwards without losing data. 
- * - * This API does not delete the stream -- it remains visible in the - * ReadSession, and any data processed by the stream is not released to other - * streams. However, no additional data will be assigned to the stream once - * this call completes. Callers must continue reading data on the stream until - * the end of the stream is reached so that data which has already been - * assigned to the stream will be processed. - * - * This method will return an error if there are no other live streams - * in the Session, or if SplitReadStream() has been called on the given - * Stream. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream - * Required. Stream to finalize. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1beta1/big_query_storage.finalize_stream.js - * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async - */ - finalizeStream( - request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.protobuf.IEmpty, - ( - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | undefined - ), - {} | undefined - ] - >; - finalizeStream( - request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - finalizeStream( - request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - finalizeStream( - request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.protobuf.IEmpty, - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.protobuf.IEmpty, - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.protobuf.IEmpty, - ( - | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options 
|| {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'stream.name': request.stream!.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.finalizeStream(request, options, callback); - } - /** - * Splits a given read stream into two Streams. These streams are referred to - * as the primary and the residual of the split. The original stream can still - * be read from in the same manner as before. Both of the returned streams can - * also be read from, and the total rows return by both child streams will be - * the same as the rows read from the original stream. - * - * Moreover, the two child streams will be allocated back to back in the - * original Stream. Concretely, it is guaranteed that for streams Original, - * Primary, and Residual, that Original[0-j] = Primary[0-j] and - * Original[j-n] = Residual[0-m] once the streams have been read to - * completion. - * - * This method is guaranteed to be idempotent. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.originalStream - * Required. Stream to split. - * @param {number} request.fraction - * A value in the range (0.0, 1.0) that specifies the fractional point at - * which the original stream should be split. The actual split point is - * evaluated on pre-filtered rows, so if a filter is provided, then there is - * no guarantee that the division of the rows between the new child streams - * will be proportional to this fractional value. Additionally, because the - * server-side unit for assigning data is collections of rows, this fraction - * will always map to to a data storage boundary on the server side. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1beta1/big_query_storage.split_read_stream.js - * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async - */ - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options?: CallOptions - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - >; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - callback: Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): void; - splitReadStream( - request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - optionsOrCallback?: - | CallOptions - | Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | 
protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - >, - callback?: Callback< - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | null - | undefined, - {} | null | undefined - > - ): Promise< - [ - protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, - ( - | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest - | undefined - ), - {} | undefined - ] - > | void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'original_stream.name': request.originalStream!.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.splitReadStream(request, options, callback); - } - - /** - * Reads rows from the table in the format prescribed by the read session. - * Each response contains one or more table rows, up to a maximum of 10 MiB - * per response; read requests which attempt to read individual rows larger - * than this will fail. - * - * Each request also returns a set of stream statistics reflecting the - * estimated total number of rows in the read stream. This number is computed - * based on the total table size and the number of active streams in the read - * session, and may change as other streams continue to read data. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} request.readPosition - * Required. 
Identifier of the position in the stream to start reading from. - * The offset requested must be less than the last row read from ReadRows. - * Requesting a larger offset is undefined. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) - * for more details and examples. - * @example include:samples/generated/v1beta1/big_query_storage.read_rows.js - * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async - */ - readRows( - request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, - options?: CallOptions - ): gax.CancellableStream { - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers['x-goog-request-params'] = - this._gaxModule.routingHeader.fromParams({ - 'read_position.stream.name': request.readPosition!.stream!.name ?? '', - }); - this.initialize(); - return this.innerApiCalls.readRows(request, options); - } - - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project: string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified readSession resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} session - * @returns {string} Resource name string. - */ - readSessionPath(project: string, location: string, session: string) { - return this.pathTemplates.readSessionPathTemplate.render({ - project: project, - location: location, - session: session, - }); - } - - /** - * Parse the project from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the project. - */ - matchProjectFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .project; - } - - /** - * Parse the location from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the location. - */ - matchLocationFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .location; - } - - /** - * Parse the session from ReadSession resource. - * - * @param {string} readSessionName - * A fully-qualified path representing ReadSession resource. - * @returns {string} A string representing the session. - */ - matchSessionFromReadSessionName(readSessionName: string) { - return this.pathTemplates.readSessionPathTemplate.match(readSessionName) - .session; - } - - /** - * Return a fully-qualified stream resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} stream - * @returns {string} Resource name string. 
- */ - streamPath(project: string, location: string, stream: string) { - return this.pathTemplates.streamPathTemplate.render({ - project: project, - location: location, - stream: stream, - }); - } - - /** - * Parse the project from Stream resource. - * - * @param {string} streamName - * A fully-qualified path representing Stream resource. - * @returns {string} A string representing the project. - */ - matchProjectFromStreamName(streamName: string) { - return this.pathTemplates.streamPathTemplate.match(streamName).project; - } - - /** - * Parse the location from Stream resource. - * - * @param {string} streamName - * A fully-qualified path representing Stream resource. - * @returns {string} A string representing the location. - */ - matchLocationFromStreamName(streamName: string) { - return this.pathTemplates.streamPathTemplate.match(streamName).location; - } - - /** - * Parse the stream from Stream resource. - * - * @param {string} streamName - * A fully-qualified path representing Stream resource. - * @returns {string} A string representing the stream. - */ - matchStreamFromStreamName(streamName: string) { - return this.pathTemplates.streamPathTemplate.match(streamName).stream; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.bigQueryStorageStub && !this._terminated) { - return this.bigQueryStorageStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json deleted file mode 100644 index 003cb084ff8..00000000000 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.storage.v1beta1.BigQueryStorage": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateReadSession": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ReadRows": { - "timeout_millis": 86400000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "BatchCreateReadSessionStreams": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "FinalizeStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "SplitReadStream": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json deleted file mode 100644 index 0b8010758a6..00000000000 --- 
a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/storage/v1beta1/arrow.proto", - "../../protos/google/cloud/bigquery/storage/v1beta1/avro.proto", - "../../protos/google/cloud/bigquery/storage/v1beta1/read_options.proto", - "../../protos/google/cloud/bigquery/storage/v1beta1/storage.proto", - "../../protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto" -] diff --git a/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json deleted file mode 100644 index 00d888bf605..00000000000 --- a/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.storage.v1beta1", - "libraryPackage": "@google-cloud/bigquery-storage", - "services": { - "BigQueryStorage": { - "clients": { - "grpc": { - "libraryClient": "BigQueryStorageClient", - "rpcs": { - "CreateReadSession": { - "methods": [ - "createReadSession" - ] - }, - "BatchCreateReadSessionStreams": { - "methods": [ - "batchCreateReadSessionStreams" - ] - }, - "FinalizeStream": { - "methods": [ - "finalizeStream" - ] - }, - "SplitReadStream": { - "methods": [ - "splitReadStream" - ] - }, - "ReadRows": { - "methods": [ - "readRows" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "BigQueryStorageClient", - "rpcs": { - "CreateReadSession": { - "methods": [ - "createReadSession" - ] - }, - "BatchCreateReadSessionStreams": { - "methods": [ - "batchCreateReadSessionStreams" - ] - }, - "FinalizeStream": { - "methods": [ - "finalizeStream" - ] - }, - "SplitReadStream": { - "methods": [ - "splitReadStream" - ] - } - } - } - } - } - } -} diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts 
b/handwritten/bigquery-storage/src/v1beta1/index.ts deleted file mode 100644 index dc3afed8ea7..00000000000 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {BigQueryStorageClient} from './big_query_storage_client'; diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js deleted file mode 100644 index d59c13c62cd..00000000000 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const storage = require('@google-cloud/bigquery-storage'); - -function main() { - const bigQueryReadClient = new storage.BigQueryReadClient(); - const bigQueryWriteClient = new storage.BigQueryWriteClient(); -} - -main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 6fd6e3ca7ee..00000000000 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import { - BigQueryReadClient, - BigQueryWriteClient, -} from '@google-cloud/bigquery-storage'; - -// check that the client class type name can be used -function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { - client.close(); -} -function doStuffWithBigQueryWriteClient(client: BigQueryWriteClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const bigQueryReadClient = new BigQueryReadClient(); - doStuffWithBigQueryReadClient(bigQueryReadClient); - // check that the client instance can be created - const bigQueryWriteClient = new BigQueryWriteClient(); - doStuffWithBigQueryWriteClient(bigQueryWriteClient); -} - -main(); diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts deleted file mode 100644 index 6dd1eaadafa..00000000000 --- a/handwritten/bigquery-storage/system-test/install.ts +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - it('TypeScript code', async function () { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync( - './system-test/fixtures/sample/src/index.ts' - ).toString(), - }, - }; - await packNTest(options); - }); - - it('JavaScript code', async function () { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync( - './system-test/fixtures/sample/src/index.js' - ).toString(), - }, - }; - await packNTest(options); - }); -}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts deleted file mode 100644 index cd2a50bb2c1..00000000000 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ /dev/null @@ -1,896 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as bigqueryreadModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -// Dynamically loaded proto JSON is needed to get the type information -// to fill in default values for request objects -const root = protobuf.Root.fromJSON( - require('../protos/protos.json') -).resolveAll(); - -// eslint-disable-next-line @typescript-eslint/no-unused-vars -function getTypeDefaultValue(typeName: string, fields: string[]) { - let type = root.lookupType(typeName) as protobuf.Type; - for (const field of fields.slice(0, -1)) { - type = type.fields[field]?.resolvedType as protobuf.Type; - } - return type.fields[fields[fields.length - 1]]?.defaultValue; -} - -function generateSampleMessage(instance: T) { - const filledObject = ( - instance.constructor as typeof protobuf.Message - ).toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject - ) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error - ? sinon.stub().rejects(error) - : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback( - response?: ResponseType, - error?: Error -) { - return error - ? sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); -} - -function stubServerStreamingCall( - response?: ResponseType, - error?: Error -) { - const transformStub = error - ? 
sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // write something to the stream to trigger transformStub and send the response back to the client - setImmediate(() => { - mockStream.write({}); - }); - setImmediate(() => { - mockStream.end(); - }); - return sinon.stub().returns(mockStream); -} - -describe('v1.BigQueryReadClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigqueryreadModule.v1.BigQueryReadClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryReadStub, undefined); - await client.initialize(); - assert(client.bigQueryReadStub); - }); - - it('has close method for the initialized client', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.bigQueryReadStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized 
client', done => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryReadStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createReadSession', () => { - it('invokes createReadSession without error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() - ); - request.readSession ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] - ); - 
request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadSession() - ); - client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); - const [response] = await client.createReadSession(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession without error using callback', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() - ); - request.readSession ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] - ); - request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadSession() - ); - client.innerApiCalls.createReadSession = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createReadSession( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IReadSession | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await 
promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession with error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() - ); - request.readSession ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] - ); - request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.createReadSession = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.createReadSession(request), expectedError); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession with closed client', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() - ); - request.readSession ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] - ); - request.readSession.table = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createReadSession(request), expectedError); - }); - }); - - describe('splitReadStream', () => { - it('invokes splitReadStream without error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() - ); - client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); - const [response] = await client.splitReadStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream without error using callback', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() - ); - client.innerApiCalls.splitReadStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.splitReadStream( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream with error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedError = new 
Error('expected'); - client.innerApiCalls.splitReadStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.splitReadStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream with closed client', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.splitReadStream(request), expectedError); - }); - }); - - describe('readRows', () => { - it('invokes readRows without error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] - ); - request.readStream = defaultValue1; - const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() - ); - 
client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes readRows with error', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] - ); - request.readStream = defaultValue1; - const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.readRows = stubServerStreamingCall( - undefined, - expectedError - ); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - const actualRequest = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[0]; - 
assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes readRows with closed client', async () => { - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] - ); - request.readStream = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - }); - }); - - describe('Path templates', () => { - describe('project', () => { - const fakePath = '/rendered/path/project'; - const expectedParameters = { - project: 'projectValue', - }; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.projectPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath('projectValue'); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1) - 
.calledWith(expectedParameters) - ); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('readSession', () => { - const fakePath = '/rendered/path/readSession'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - session: 'sessionValue', - }; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.readSessionPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.readSessionPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('readSessionPath', () => { - const result = client.readSessionPath( - 'projectValue', - 'locationValue', - 'sessionValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.readSessionPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromReadSessionName', () => { - const result = client.matchProjectFromReadSessionName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchLocationFromReadSessionName', () => { - const result = client.matchLocationFromReadSessionName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchSessionFromReadSessionName', () => { - const result = client.matchSessionFromReadSessionName(fakePath); - assert.strictEqual(result, 'sessionValue'); - assert( - 
(client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('readStream', () => { - const fakePath = '/rendered/path/readStream'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - session: 'sessionValue', - stream: 'streamValue', - }; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.readStreamPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.readStreamPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('readStreamPath', () => { - const result = client.readStreamPath( - 'projectValue', - 'locationValue', - 'sessionValue', - 'streamValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.readStreamPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromReadStreamName', () => { - const result = client.matchProjectFromReadStreamName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchLocationFromReadStreamName', () => { - const result = client.matchLocationFromReadStreamName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchSessionFromReadStreamName', () => { - const result = client.matchSessionFromReadStreamName(fakePath); - assert.strictEqual(result, 'sessionValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchStreamFromReadStreamName', () => { - const result = 
client.matchStreamFromReadStreamName(fakePath); - assert.strictEqual(result, 'streamValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('table', () => { - const fakePath = '/rendered/path/table'; - const expectedParameters = { - project: 'projectValue', - dataset: 'datasetValue', - table: 'tableValue', - }; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.tablePathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.tablePathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('tablePath', () => { - const result = client.tablePath( - 'projectValue', - 'datasetValue', - 'tableValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.tablePathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromTableName', () => { - const result = client.matchProjectFromTableName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchDatasetFromTableName', () => { - const result = client.matchDatasetFromTableName(fakePath); - assert.strictEqual(result, 'datasetValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchTableFromTableName', () => { - const result = client.matchTableFromTableName(fakePath); - assert.strictEqual(result, 'tableValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('writeStream', () => { - const fakePath = '/rendered/path/writeStream'; - const expectedParameters = 
{ - project: 'projectValue', - dataset: 'datasetValue', - table: 'tableValue', - stream: 'streamValue', - }; - const client = new bigqueryreadModule.v1.BigQueryReadClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.writeStreamPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.writeStreamPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('writeStreamPath', () => { - const result = client.writeStreamPath( - 'projectValue', - 'datasetValue', - 'tableValue', - 'streamValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromWriteStreamName', () => { - const result = client.matchProjectFromWriteStreamName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchDatasetFromWriteStreamName', () => { - const result = client.matchDatasetFromWriteStreamName(fakePath); - assert.strictEqual(result, 'datasetValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchTableFromWriteStreamName', () => { - const result = client.matchTableFromWriteStreamName(fakePath); - assert.strictEqual(result, 'tableValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchStreamFromWriteStreamName', () => { - const result = client.matchStreamFromWriteStreamName(fakePath); - assert.strictEqual(result, 'streamValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - }); -}); diff --git 
a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts deleted file mode 100644 index 3623068f487..00000000000 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ /dev/null @@ -1,1055 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as bigquerystorageModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -// Dynamically loaded proto JSON is needed to get the type information -// to fill in default values for request objects -const root = protobuf.Root.fromJSON( - require('../protos/protos.json') -).resolveAll(); - -// eslint-disable-next-line @typescript-eslint/no-unused-vars -function getTypeDefaultValue(typeName: string, fields: string[]) { - let type = root.lookupType(typeName) as protobuf.Type; - for (const field of fields.slice(0, -1)) { - type = type.fields[field]?.resolvedType as protobuf.Type; - } - return type.fields[fields[fields.length - 1]]?.defaultValue; -} - -function generateSampleMessage(instance: T) { - const filledObject = ( - instance.constructor as typeof protobuf.Message - ).toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject - ) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error - ? sinon.stub().rejects(error) - : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback( - response?: ResponseType, - error?: Error -) { - return error - ? sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); -} - -function stubServerStreamingCall( - response?: ResponseType, - error?: Error -) { - const transformStub = error - ? 
sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // write something to the stream to trigger transformStub and send the response back to the client - setImmediate(() => { - mockStream.write({}); - }); - setImmediate(() => { - mockStream.end(); - }); - return sinon.stub().returns(mockStream); -} - -describe('v1beta1.BigQueryStorageClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = - bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryStorageStub, undefined); - await client.initialize(); - assert(client.bigQueryStorageStub); - }); - - it('has close method for the initialized client', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.bigQueryStorageStub); - 
client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryStorageStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createReadSession', () => { - it('invokes createReadSession without error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() - ); - request.tableReference ??= {}; - const 
defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] - ); - request.tableReference.projectId = defaultValue1; - request.tableReference ??= {}; - const defaultValue2 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] - ); - request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() - ); - client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); - const [response] = await client.createReadSession(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession without error using callback', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() - ); - request.tableReference ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] - ); - request.tableReference.projectId = defaultValue1; - request.tableReference ??= {}; - const defaultValue2 = getTypeDefaultValue( - 
'.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] - ); - request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() - ); - client.innerApiCalls.createReadSession = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createReadSession( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.IReadSession | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession with error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() - ); - request.tableReference ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] - ); - request.tableReference.projectId = defaultValue1; - request.tableReference ??= {}; - const defaultValue2 = getTypeDefaultValue( - 
'.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] - ); - request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; - const expectedError = new Error('expected'); - client.innerApiCalls.createReadSession = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.createReadSession(request), expectedError); - const actualRequest = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createReadSession as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createReadSession with closed client', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() - ); - request.tableReference ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] - ); - request.tableReference.projectId = defaultValue1; - request.tableReference ??= {}; - const defaultValue2 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] - ); - request.tableReference.datasetId = defaultValue2; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createReadSession(request), expectedError); - }); - }); - - describe('batchCreateReadSessionStreams', () => { - it('invokes 
batchCreateReadSessionStreams without error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() - ); - request.session ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] - ); - request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() - ); - client.innerApiCalls.batchCreateReadSessionStreams = - stubSimpleCall(expectedResponse); - const [response] = await client.batchCreateReadSessionStreams(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCreateReadSessionStreams without error using callback', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() - ); - request.session ??= {}; - const defaultValue1 = getTypeDefaultValue( - 
'.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] - ); - request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() - ); - client.innerApiCalls.batchCreateReadSessionStreams = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.batchCreateReadSessionStreams( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCreateReadSessionStreams with error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() - ); - request.session ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] - ); - request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; - const expectedError 
= new Error('expected'); - client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects( - client.batchCreateReadSessionStreams(request), - expectedError - ); - const actualRequest = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCreateReadSessionStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCreateReadSessionStreams with closed client', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() - ); - request.session ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] - ); - request.session.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects( - client.batchCreateReadSessionStreams(request), - expectedError - ); - }); - }); - - describe('finalizeStream', () => { - it('invokes finalizeStream without error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() - ); - request.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - 
'.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] - ); - request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() - ); - client.innerApiCalls.finalizeStream = stubSimpleCall(expectedResponse); - const [response] = await client.finalizeStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeStream without error using callback', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() - ); - request.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] - ); - request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() - ); - client.innerApiCalls.finalizeStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.finalizeStream( - request, - ( - err?: Error | null, - result?: protos.google.protobuf.IEmpty | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeStream with error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() - ); - request.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] - ); - request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.finalizeStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.finalizeStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.finalizeStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeStream with closed client', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new 
protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() - ); - request.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] - ); - request.stream.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.finalizeStream(request), expectedError); - }); - }); - - describe('splitReadStream', () => { - it('invokes splitReadStream without error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() - ); - request.originalStream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] - ); - request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() - ); - client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); - const [response] = await client.splitReadStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream without error using callback', async () => { - const client = new 
bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() - ); - request.originalStream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] - ); - request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() - ); - client.innerApiCalls.splitReadStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.splitReadStream( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream with error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() - ); - request.originalStream ??= {}; - const 
defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] - ); - request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.splitReadStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.splitReadStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.splitReadStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes splitReadStream with closed client', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() - ); - request.originalStream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] - ); - request.originalStream.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.splitReadStream(request), expectedError); - }); - }); - - describe('readRows', () => { - it('invokes readRows without error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new 
protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() - ); - request.readPosition ??= {}; - request.readPosition.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] - ); - request.readPosition.stream.name = defaultValue1; - const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() - ); - client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes readRows with error', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() - ); - request.readPosition ??= {}; - request.readPosition.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] - ); - request.readPosition.stream.name = 
defaultValue1; - const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.readRows = stubServerStreamingCall( - undefined, - expectedError - ); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - const actualRequest = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.readRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes readRows with closed client', async () => { - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() - ); - request.readPosition ??= {}; - request.readPosition.stream ??= {}; - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] - ); - request.readPosition.stream.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - const stream = client.readRows(request); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await 
assert.rejects(promise, expectedError); - }); - }); - - describe('Path templates', () => { - describe('project', () => { - const fakePath = '/rendered/path/project'; - const expectedParameters = { - project: 'projectValue', - }; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.projectPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath('projectValue'); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('readSession', () => { - const fakePath = '/rendered/path/readSession'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - session: 'sessionValue', - }; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.readSessionPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.readSessionPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('readSessionPath', () => { - const result = client.readSessionPath( - 'projectValue', - 'locationValue', - 'sessionValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.readSessionPathTemplate.render as SinonStub) - 
.getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromReadSessionName', () => { - const result = client.matchProjectFromReadSessionName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchLocationFromReadSessionName', () => { - const result = client.matchLocationFromReadSessionName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchSessionFromReadSessionName', () => { - const result = client.matchSessionFromReadSessionName(fakePath); - assert.strictEqual(result, 'sessionValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('stream', () => { - const fakePath = '/rendered/path/stream'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - stream: 'streamValue', - }; - const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.streamPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.streamPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('streamPath', () => { - const result = client.streamPath( - 'projectValue', - 'locationValue', - 'streamValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.streamPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromStreamName', () => { - const result = client.matchProjectFromStreamName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - 
(client.pathTemplates.streamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchLocationFromStreamName', () => { - const result = client.matchLocationFromStreamName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.streamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchStreamFromStreamName', () => { - const result = client.matchStreamFromStreamName(fakePath); - assert.strictEqual(result, 'streamValue'); - assert( - (client.pathTemplates.streamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - }); -}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts deleted file mode 100644 index 830e18a06bb..00000000000 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as bigquerywriteModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -// Dynamically loaded proto JSON is needed to get the type information -// to fill in default values for request objects -const root = protobuf.Root.fromJSON( - require('../protos/protos.json') -).resolveAll(); - -// eslint-disable-next-line @typescript-eslint/no-unused-vars -function getTypeDefaultValue(typeName: string, fields: string[]) { - let type = root.lookupType(typeName) as protobuf.Type; - for (const field of fields.slice(0, -1)) { - type = type.fields[field]?.resolvedType as protobuf.Type; - } - return type.fields[fields[fields.length - 1]]?.defaultValue; -} - -function generateSampleMessage(instance: T) { - const filledObject = ( - instance.constructor as typeof protobuf.Message - ).toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject - ) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error - ? sinon.stub().rejects(error) - : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback( - response?: ResponseType, - error?: Error -) { - return error - ? sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); -} - -function stubBidiStreamingCall( - response?: ResponseType, - error?: Error -) { - const transformStub = error - ? 
sinon.stub().callsArgWith(2, error) - : sinon.stub().callsArgWith(2, null, response); - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - return sinon.stub().returns(mockStream); -} - -describe('v1.BigQueryWriteClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = - bigquerywriteModule.v1.BigQueryWriteClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = bigquerywriteModule.v1.BigQueryWriteClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.bigQueryWriteStub, undefined); - await client.initialize(); - assert(client.bigQueryWriteStub); - }); - - it('has close method for the initialized client', done => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.bigQueryWriteStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
assert.strictEqual(client.bigQueryWriteStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createWriteStream', () => { - it('invokes createWriteStream without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() - ); - 
client.innerApiCalls.createWriteStream = stubSimpleCall(expectedResponse); - const [response] = await client.createWriteStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createWriteStream without error using callback', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() - ); - client.innerApiCalls.createWriteStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createWriteStream( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.createWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createWriteStream as 
SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createWriteStream with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.createWriteStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.createWriteStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.createWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.createWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes createWriteStream with closed client', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await 
assert.rejects(client.createWriteStream(request), expectedError); - }); - }); - - describe('getWriteStream', () => { - it('invokes getWriteStream without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() - ); - client.innerApiCalls.getWriteStream = stubSimpleCall(expectedResponse); - const [response] = await client.getWriteStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.getWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.getWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes getWriteStream without error using callback', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = 
generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() - ); - client.innerApiCalls.getWriteStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getWriteStream( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.getWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.getWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes getWriteStream with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.getWriteStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.getWriteStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.getWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.getWriteStream as SinonStub - 
).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes getWriteStream with closed client', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getWriteStream(request), expectedError); - }); - }); - - describe('finalizeWriteStream', () => { - it('invokes finalizeWriteStream without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() - ); - client.innerApiCalls.finalizeWriteStream = - stubSimpleCall(expectedResponse); - const [response] = await client.finalizeWriteStream(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - 
client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeWriteStream without error using callback', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() - ); - client.innerApiCalls.finalizeWriteStream = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.finalizeWriteStream( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeWriteStream with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, 
- projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.finalizeWriteStream = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects(client.finalizeWriteStream(request), expectedError); - const actualRequest = ( - client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.finalizeWriteStream as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes finalizeWriteStream with closed client', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] - ); - request.name = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.finalizeWriteStream(request), expectedError); - }); - }); - - describe('batchCommitWriteStreams', () => { - it('invokes batchCommitWriteStreams without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() - ); - client.innerApiCalls.batchCommitWriteStreams = - stubSimpleCall(expectedResponse); - const [response] = await client.batchCommitWriteStreams(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCommitWriteStreams without error using callback', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() - ); - client.innerApiCalls.batchCommitWriteStreams = - stubSimpleCallWithCallback(expectedResponse); - const promise = 
new Promise((resolve, reject) => { - client.batchCommitWriteStreams( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCommitWriteStreams with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall( - undefined, - expectedError - ); - await assert.rejects( - client.batchCommitWriteStreams(request), - expectedError - ); - const actualRequest = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.batchCommitWriteStreams as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes batchCommitWriteStreams with closed client', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] - ); - request.parent = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects( - client.batchCommitWriteStreams(request), - expectedError - ); - }); - }); - - describe('flushRows', () => { - it('invokes flushRows without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] - ); - request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() - ); - client.innerApiCalls.flushRows = stubSimpleCall(expectedResponse); - const [response] = await client.flushRows(request); - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.flushRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.flushRows as SinonStub - 
).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes flushRows without error using callback', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] - ); - request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; - const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() - ); - client.innerApiCalls.flushRows = - stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.flushRows( - request, - ( - err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - const actualRequest = ( - client.innerApiCalls.flushRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.flushRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes flushRows with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new 
protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] - ); - request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; - const expectedError = new Error('expected'); - client.innerApiCalls.flushRows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.flushRows(request), expectedError); - const actualRequest = ( - client.innerApiCalls.flushRows as SinonStub - ).getCall(0).args[0]; - assert.deepStrictEqual(actualRequest, request); - const actualHeaderRequestParams = ( - client.innerApiCalls.flushRows as SinonStub - ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; - assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); - }); - - it('invokes flushRows with closed client', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() - ); - const defaultValue1 = getTypeDefaultValue( - '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] - ); - request.writeStream = defaultValue1; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.flushRows(request), expectedError); - }); - }); - - describe('appendRows', () => { - it('invokes appendRows without error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() - ); - - const expectedResponse = generateSampleMessage( - new 
protos.google.cloud.bigquery.storage.v1.AppendRowsResponse() - ); - client.innerApiCalls.appendRows = stubBidiStreamingCall(expectedResponse); - const stream = client.appendRows(); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - stream.write(request); - stream.end(); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert( - (client.innerApiCalls.appendRows as SinonStub) - .getCall(0) - .calledWith(null) - ); - assert.deepStrictEqual( - ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) - .args[0], - request - ); - }); - - it('invokes appendRows with error', async () => { - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() - ); - const expectedError = new Error('expected'); - client.innerApiCalls.appendRows = stubBidiStreamingCall( - undefined, - expectedError - ); - const stream = client.appendRows(); - const promise = new Promise((resolve, reject) => { - stream.on( - 'data', - ( - response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse - ) => { - resolve(response); - } - ); - stream.on('error', (err: Error) => { - reject(err); - }); - stream.write(request); - stream.end(); - }); - await assert.rejects(promise, expectedError); - assert( - (client.innerApiCalls.appendRows as SinonStub) - .getCall(0) - .calledWith(null) - ); - assert.deepStrictEqual( - ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) - .args[0], - request - ); - }); - }); - - describe('Path templates', () => { - describe('project', () => { - const fakePath = 
'/rendered/path/project'; - const expectedParameters = { - project: 'projectValue', - }; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.projectPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath('projectValue'); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('readSession', () => { - const fakePath = '/rendered/path/readSession'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - session: 'sessionValue', - }; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.readSessionPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.readSessionPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('readSessionPath', () => { - const result = client.readSessionPath( - 'projectValue', - 'locationValue', - 'sessionValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.readSessionPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromReadSessionName', () => { - const result = 
client.matchProjectFromReadSessionName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchLocationFromReadSessionName', () => { - const result = client.matchLocationFromReadSessionName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchSessionFromReadSessionName', () => { - const result = client.matchSessionFromReadSessionName(fakePath); - assert.strictEqual(result, 'sessionValue'); - assert( - (client.pathTemplates.readSessionPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('readStream', () => { - const fakePath = '/rendered/path/readStream'; - const expectedParameters = { - project: 'projectValue', - location: 'locationValue', - session: 'sessionValue', - stream: 'streamValue', - }; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.readStreamPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.readStreamPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('readStreamPath', () => { - const result = client.readStreamPath( - 'projectValue', - 'locationValue', - 'sessionValue', - 'streamValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.readStreamPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromReadStreamName', () => { - const result = client.matchProjectFromReadStreamName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - 
.calledWith(fakePath) - ); - }); - - it('matchLocationFromReadStreamName', () => { - const result = client.matchLocationFromReadStreamName(fakePath); - assert.strictEqual(result, 'locationValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchSessionFromReadStreamName', () => { - const result = client.matchSessionFromReadStreamName(fakePath); - assert.strictEqual(result, 'sessionValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchStreamFromReadStreamName', () => { - const result = client.matchStreamFromReadStreamName(fakePath); - assert.strictEqual(result, 'streamValue'); - assert( - (client.pathTemplates.readStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('table', () => { - const fakePath = '/rendered/path/table'; - const expectedParameters = { - project: 'projectValue', - dataset: 'datasetValue', - table: 'tableValue', - }; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.tablePathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.tablePathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('tablePath', () => { - const result = client.tablePath( - 'projectValue', - 'datasetValue', - 'tableValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.tablePathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromTableName', () => { - const result = client.matchProjectFromTableName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - 
.calledWith(fakePath) - ); - }); - - it('matchDatasetFromTableName', () => { - const result = client.matchDatasetFromTableName(fakePath); - assert.strictEqual(result, 'datasetValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchTableFromTableName', () => { - const result = client.matchTableFromTableName(fakePath); - assert.strictEqual(result, 'tableValue'); - assert( - (client.pathTemplates.tablePathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - - describe('writeStream', () => { - const fakePath = '/rendered/path/writeStream'; - const expectedParameters = { - project: 'projectValue', - dataset: 'datasetValue', - table: 'tableValue', - stream: 'streamValue', - }; - const client = new bigquerywriteModule.v1.BigQueryWriteClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.writeStreamPathTemplate.render = sinon - .stub() - .returns(fakePath); - client.pathTemplates.writeStreamPathTemplate.match = sinon - .stub() - .returns(expectedParameters); - - it('writeStreamPath', () => { - const result = client.writeStreamPath( - 'projectValue', - 'datasetValue', - 'tableValue', - 'streamValue' - ); - assert.strictEqual(result, fakePath); - assert( - (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) - .getCall(-1) - .calledWith(expectedParameters) - ); - }); - - it('matchProjectFromWriteStreamName', () => { - const result = client.matchProjectFromWriteStreamName(fakePath); - assert.strictEqual(result, 'projectValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchDatasetFromWriteStreamName', () => { - const result = client.matchDatasetFromWriteStreamName(fakePath); - assert.strictEqual(result, 'datasetValue'); - assert( - 
(client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchTableFromWriteStreamName', () => { - const result = client.matchTableFromWriteStreamName(fakePath); - assert.strictEqual(result, 'tableValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - - it('matchStreamFromWriteStreamName', () => { - const result = client.matchStreamFromWriteStreamName(fakePath); - assert.strictEqual(result, 'streamValue'); - assert( - (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) - .getCall(-1) - .calledWith(fakePath) - ); - }); - }); - }); -}); diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json deleted file mode 100644 index c78f1c884ef..00000000000 --- a/handwritten/bigquery-storage/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js deleted file mode 100644 index de163617408..00000000000 --- a/handwritten/bigquery-storage/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'BigQueryRead', - filename: './big-query-read.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/, - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader', - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader', - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader', - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader', - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader', - }, - ], - }, - mode: 'production', -}; From 76830104661c20103681aee9b81e6bbf242bccb0 Mon Sep 17 00:00:00 2001 From: sofisl <55454395+sofisl@users.noreply.github.com> Date: Thu, 15 Dec 2022 20:16:54 -0800 Subject: [PATCH 203/333] Revert "build: update README for deprecation notice and delete all files except samples (#303)" (#310) This reverts commit 1d8bee33368c2eb1f2d27a9973552f73daa9334c. 
--- .../.github/.OwlBot.lock.yaml | 17 + .../bigquery-storage/.github/.OwlBot.yaml | 30 + .../bigquery-storage/.github/CODEOWNERS | 12 + .../.github/ISSUE_TEMPLATE/bug_report.md | 38 + .../.github/ISSUE_TEMPLATE/config.yml | 4 + .../.github/ISSUE_TEMPLATE/feature_request.md | 18 + .../.github/ISSUE_TEMPLATE/question.md | 12 + .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + .../bigquery-storage/.github/auto-approve.yml | 3 + .../bigquery-storage/.github/auto-label.yaml | 2 + .../.github/generated-files-bot.yml | 16 + .../.github/release-please.yml | 2 + .../.github/release-trigger.yml | 1 + .../.github/sync-repo-settings.yaml | 24 + .../.kokoro/continuous/node12/common.cfg | 24 + .../.kokoro/continuous/node12/lint.cfg | 4 + .../continuous/node12/samples-test.cfg | 7 + .../.kokoro/continuous/node12/system-test.cfg | 7 + .../.kokoro/continuous/node12/test.cfg | 0 handwritten/bigquery-storage/.kokoro/docs.sh | 25 + handwritten/bigquery-storage/.kokoro/lint.sh | 33 + .../.kokoro/presubmit/windows/common.cfg | 2 + .../.kokoro/presubmit/windows/test.cfg | 2 + .../bigquery-storage/.kokoro/publish.sh | 30 + .../.kokoro/release/docs-devsite.cfg | 26 + .../.kokoro/release/docs-devsite.sh | 29 + .../bigquery-storage/.kokoro/release/docs.cfg | 26 + .../bigquery-storage/.kokoro/release/docs.sh | 49 + .../.kokoro/release/publish.cfg | 39 + .../bigquery-storage/.kokoro/samples-test.sh | 19 +- .../bigquery-storage/.kokoro/system-test.sh | 48 +- handwritten/bigquery-storage/.kokoro/test.bat | 33 + handwritten/bigquery-storage/.kokoro/test.sh | 51 + handwritten/bigquery-storage/CHANGELOG.md | 223 + .../bigquery-storage/CODE_OF_CONDUCT.md | 94 + handwritten/bigquery-storage/CONTRIBUTING.md | 76 + handwritten/bigquery-storage/LICENSE | 202 + handwritten/bigquery-storage/README.md | 2 - .../bigquery-storage/linkinator.config.json | 16 + handwritten/bigquery-storage/owlbot.py | 15 +- handwritten/bigquery-storage/package.json | 54 + 
.../bigquery/storage/v1/annotations.proto | 28 + .../cloud/bigquery/storage/v1/arrow.proto | 64 + .../cloud/bigquery/storage/v1/avro.proto | 56 + .../cloud/bigquery/storage/v1/protobuf.proto | 48 + .../cloud/bigquery/storage/v1/storage.proto | 666 + .../cloud/bigquery/storage/v1/stream.proto | 286 + .../cloud/bigquery/storage/v1/table.proto | 166 + .../bigquery/storage/v1beta1/arrow.proto | 36 + .../cloud/bigquery/storage/v1beta1/avro.proto | 37 + .../storage/v1beta1/read_options.proto | 39 + .../bigquery/storage/v1beta1/storage.proto | 405 + .../storage/v1beta1/table_reference.proto | 41 + .../bigquery-storage/protos/protos.d.ts | 11968 ++++++ handwritten/bigquery-storage/protos/protos.js | 29973 ++++++++++++++++ .../bigquery-storage/protos/protos.json | 2817 ++ handwritten/bigquery-storage/src/index.ts | 38 + .../src/v1/big_query_read_client.ts | 938 + .../src/v1/big_query_read_client_config.json | 44 + .../src/v1/big_query_read_proto_list.json | 9 + .../src/v1/big_query_write_client.ts | 1213 + .../src/v1/big_query_write_client_config.json | 73 + .../src/v1/big_query_write_proto_list.json | 9 + .../src/v1/gapic_metadata.json | 117 + handwritten/bigquery-storage/src/v1/index.ts | 20 + .../src/v1beta1/big_query_storage_client.ts | 1004 + .../big_query_storage_client_config.json | 54 + .../v1beta1/big_query_storage_proto_list.json | 7 + .../src/v1beta1/gapic_metadata.json | 68 + .../bigquery-storage/src/v1beta1/index.ts | 19 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 41 + .../bigquery-storage/system-test/install.ts | 51 + .../test/gapic_big_query_read_v1.ts | 896 + .../test/gapic_big_query_storage_v1beta1.ts | 1055 + .../test/gapic_big_query_write_v1.ts | 1246 + handwritten/bigquery-storage/tsconfig.json | 19 + .../bigquery-storage/webpack.config.js | 64 + 79 files changed, 54958 insertions(+), 13 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/.OwlBot.lock.yaml create mode 100644 
handwritten/bigquery-storage/.github/.OwlBot.yaml create mode 100644 handwritten/bigquery-storage/.github/CODEOWNERS create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 handwritten/bigquery-storage/.github/auto-approve.yml create mode 100644 handwritten/bigquery-storage/.github/auto-label.yaml create mode 100644 handwritten/bigquery-storage/.github/generated-files-bot.yml create mode 100644 handwritten/bigquery-storage/.github/release-please.yml create mode 100644 handwritten/bigquery-storage/.github/release-trigger.yml create mode 100644 handwritten/bigquery-storage/.github/sync-repo-settings.yaml create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/docs.sh create mode 100755 handwritten/bigquery-storage/.kokoro/lint.sh create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/publish.sh create mode 100644 handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg create mode 100755 
handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh create mode 100644 handwritten/bigquery-storage/.kokoro/release/docs.cfg create mode 100755 handwritten/bigquery-storage/.kokoro/release/docs.sh create mode 100644 handwritten/bigquery-storage/.kokoro/release/publish.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/test.bat create mode 100755 handwritten/bigquery-storage/.kokoro/test.sh create mode 100644 handwritten/bigquery-storage/CHANGELOG.md create mode 100644 handwritten/bigquery-storage/CODE_OF_CONDUCT.md create mode 100644 handwritten/bigquery-storage/CONTRIBUTING.md create mode 100644 handwritten/bigquery-storage/LICENSE create mode 100644 handwritten/bigquery-storage/linkinator.config.json create mode 100644 handwritten/bigquery-storage/package.json create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto create mode 100644 
handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto create mode 100644 handwritten/bigquery-storage/protos/protos.d.ts create mode 100644 handwritten/bigquery-storage/protos/protos.js create mode 100644 handwritten/bigquery-storage/protos/protos.json create mode 100644 handwritten/bigquery-storage/src/index.ts create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client.ts create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client.ts create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json create mode 100644 handwritten/bigquery-storage/src/v1/gapic_metadata.json create mode 100644 handwritten/bigquery-storage/src/v1/index.ts create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json create mode 100644 handwritten/bigquery-storage/src/v1beta1/index.ts create mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js create mode 100644 handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts create mode 100644 handwritten/bigquery-storage/system-test/install.ts create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts create mode 100644 handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts create mode 100644 handwritten/bigquery-storage/tsconfig.json 
create mode 100644 handwritten/bigquery-storage/webpack.config.js diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml new file mode 100644 index 00000000000..e97989708da --- /dev/null +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest + digest: sha256:5b05f26103855c3a15433141389c478d1d3fe088fb5d4e3217c4793f6b3f245e +# created: 2022-11-04 diff --git a/handwritten/bigquery-storage/.github/.OwlBot.yaml b/handwritten/bigquery-storage/.github/.OwlBot.yaml new file mode 100644 index 00000000000..2d27e09de99 --- /dev/null +++ b/handwritten/bigquery-storage/.github/.OwlBot.yaml @@ -0,0 +1,30 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest + +deep-preserve-regex: + - /owl-bot-staging/v1alpha2 + - /owl-bot-staging/v1beta2 + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a + diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS new file mode 100644 index 00000000000..6d82f1f7467 --- /dev/null +++ b/handwritten/bigquery-storage/.github/CODEOWNERS @@ -0,0 +1,12 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The yoshi-nodejs team is the default owner for nodejs repositories. +* @googleapis/yoshi-nodejs @googleapis/api-bigquery + +# The github automation team is the default owner for the auto-approve file. +.github/auto-approve.yml @googleapis/github-automation diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000..0ad95022413 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +labels: 'type: bug, priority: p2' +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +1) Is this a client library issue or a product issue? +This is the client library for . 
We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. + +2) Did someone already solve this? + - Search the issues already opened: https://github.com/googleapis/nodejs-bigquery-storage/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node + - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js + +3) Do you have a support contract? +Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. + +If the support paths suggested above still do not result in a resolution, please provide the following details. + +#### Environment details + + - OS: + - Node.js version: + - npm version: + - `@google-cloud/bigquery-storage` version: + +#### Steps to reproduce + + 1. ? + 2. ? + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..603b90133b6 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,4 @@ +contact_links: + - name: Google Cloud Support + url: https://cloud.google.com/support/ + about: If you have a support contract with Google, please use the Google Cloud Support portal. 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000000..b0327dfa02e --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library +labels: 'type: feature request, priority: p3' +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000000..97323113911 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,12 @@ +--- +name: Question +about: Ask a question +labels: 'type: question, priority: p3' +--- + +Thanks for stopping by to ask us a question! Please make sure to include: +- What you're trying to do +- What code you've already tried +- Any error messages you're getting + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 00000000000..99586903212 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..19153139702 --- /dev/null +++ b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml new file mode 100644 index 00000000000..4cd91cc16ae --- /dev/null +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -0,0 +1,3 @@ +processes: + - "NodeDependency" + - "OwlBotTemplateChanges" diff --git a/handwritten/bigquery-storage/.github/auto-label.yaml b/handwritten/bigquery-storage/.github/auto-label.yaml new file mode 100644 index 00000000000..09c8d735b45 --- /dev/null +++ b/handwritten/bigquery-storage/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml new file mode 100644 index 00000000000..992ccef4a13 --- /dev/null +++ b/handwritten/bigquery-storage/.github/generated-files-bot.yml @@ -0,0 +1,16 @@ +generatedFiles: +- path: '.kokoro/**' + message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/CODEOWNERS' + message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' +- path: '.github/workflows/ci.yaml' + message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: '.github/generated-files-bot.+(yml|yaml)' + message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' +- path: 'README.md' + message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' +- path: 'samples/README.md' + message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' +ignoreAuthors: +- 'gcf-owl-bot[bot]' +- 'yoshi-automation' diff --git a/handwritten/bigquery-storage/.github/release-please.yml b/handwritten/bigquery-storage/.github/release-please.yml new file mode 100644 index 00000000000..a1b41da3cb3 --- /dev/null +++ b/handwritten/bigquery-storage/.github/release-please.yml @@ -0,0 +1,2 @@ +handleGHRelease: true +releaseType: node diff --git a/handwritten/bigquery-storage/.github/release-trigger.yml b/handwritten/bigquery-storage/.github/release-trigger.yml new file mode 100644 index 00000000000..d4ca94189e1 --- /dev/null +++ b/handwritten/bigquery-storage/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml new file mode 100644 index 00000000000..4a30a08e54c --- /dev/null +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -0,0 +1,24 @@ +branchProtectionRules: + - pattern: main + isAdminEnforced: true + requiredApprovingReviewCount: 1 + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - "ci/kokoro: Samples test" + - "ci/kokoro: System test" + - docs + - lint + - test (12) + - test (14) + - test (16) + - cla/google + - windows + - OwlBot Post Processor +permissionRules: + - team: yoshi-admins + permission: admin + - team: jsteam-admins + permission: admin + - team: jsteam + permission: push diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg 
b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg new file mode 100644 index 00000000000..7fc0cdeac69 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg new file mode 100644 index 00000000000..0a5d546b96b --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg new file mode 100644 index 00000000000..68b02101fc1 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg new file mode 100644 index 00000000000..3ccb29d69f8 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg @@ -0,0 +1,7 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/docs.sh b/handwritten/bigquery-storage/.kokoro/docs.sh new file mode 100755 index 00000000000..85901242b5e --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/docs.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=${HOME}/.npm-global + +cd $(dirname $0)/.. 
+ +npm install + +npm run docs-test diff --git a/handwritten/bigquery-storage/.kokoro/lint.sh b/handwritten/bigquery-storage/.kokoro/lint.sh new file mode 100755 index 00000000000..aef4866e4c4 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/lint.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=${HOME}/.npm-global + +cd $(dirname $0)/.. + +npm install + +# Install and link samples +if [ -f samples/package.json ]; then + cd samples/ + npm link ../ + npm install + cd .. 
+fi + +npm run lint diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg new file mode 100644 index 00000000000..d6e25e0b1b8 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/windows/common.cfg @@ -0,0 +1,2 @@ +# Format: //devtools/kokoro/config/proto/build.proto + diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg new file mode 100644 index 00000000000..83de067d5f1 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg @@ -0,0 +1,2 @@ +# Use the test file directly +build_file: "nodejs-bigquery-storage/.kokoro/test.bat" diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh new file mode 100755 index 00000000000..949e3e1d0c2 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=${HOME}/.npm-global + +# Start the releasetool reporter +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +cd $(dirname $0)/.. 
+ +NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) +echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc + +npm install +npm publish --access=public --registry=https://wombat-dressing-room.appspot.com diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg new file mode 100644 index 00000000000..8bcc62cc814 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/release/docs-devsite.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh new file mode 100755 index 00000000000..2198e67fe92 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" + cd $(dirname $0)/../.. +fi + +npm install +npm install --no-save @google-cloud/cloud-rad@^0.2.5 +npx @google-cloud/cloud-rad \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg new file mode 100644 index 00000000000..17861c90782 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -0,0 +1,26 @@ +# service account used to publish up-to-date docs. +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# doc publications use a Python image. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/release/docs.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh new file mode 100755 index 00000000000..1d8f3f490a5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/docs.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# build jsdocs (Python is installed on the Node 10 docker image). +if [[ -z "$CREDENTIALS" ]]; then + # if CREDENTIALS are explicitly set, assume we're testing locally + # and don't set NPM_CONFIG_PREFIX. + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" + cd $(dirname $0)/../.. +fi +npm install +npm run docs + +# create docs.metadata, based on package.json and .repo-metadata.json. +npm i json@9.0.6 -g +python3 -m docuploader create-metadata \ + --name=$(cat .repo-metadata.json | json name) \ + --version=$(cat package.json | json version) \ + --language=$(cat .repo-metadata.json | json language) \ + --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ + --product-page=$(cat .repo-metadata.json | json product_documentation) \ + --github-repository=$(cat .repo-metadata.json | json repo) \ + --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) +cp docs.metadata ./docs/docs.metadata + +# deploy the docs. 
+if [[ -z "$CREDENTIALS" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi +if [[ -z "$BUCKET" ]]; then + BUCKET=docs-staging +fi +python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg new file mode 100644 index 00000000000..ba6547f468f --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -0,0 +1,39 @@ +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-npm-token-1" + } + } +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/publish.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 5228b38be94..fbc058a4ec4 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -1,7 +1,8 @@ #!/bin/bash -# Copyright 2022 Google LLC + +# Copyright 2018 Google LLC # -# Licensed under the Apache License, Version 2.0 (the License); +# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # @@ -12,22 +13,32 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + set -eo pipefail + export NPM_CONFIG_PREFIX=${HOME}/.npm-global + # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export GCLOUD_PROJECT=long-door-651 + cd $(dirname $0)/.. + # Run a pre-test hook, if a pre-samples-test.sh is in the project if [ -f .kokoro/pre-samples-test.sh ]; then set +x . .kokoro/pre-samples-test.sh set -x fi + if [ -f samples/package.json ]; then + npm install + # Install and link samples cd samples/ + npm link ../ npm install + cd .. # If tests are running against main branch, configure flakybot # to open issues on failures: if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then @@ -39,8 +50,10 @@ if [ -f samples/package.json ]; then } trap cleanup EXIT HUP fi - npm run test + + npm run samples-test fi + # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: COVERAGE_NODE=12 diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 506e797862a..87fa0653d76 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -1,5 +1,6 @@ #!/bin/bash -# Copyright 2022 Google LLC + +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +13,49 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + set -eo pipefail -echo "no-op" +export NPM_CONFIG_PREFIX=${HOME}/.npm-global + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +export GCLOUD_PROJECT=long-door-651 + +cd $(dirname $0)/.. + +# Run a pre-test hook, if a pre-system-test.sh is in the project +if [ -f .kokoro/pre-system-test.sh ]; then + set +x + . .kokoro/pre-system-test.sh + set -x +fi + +npm install + +# If tests are running against main branch, configure flakybot +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + +npm run system-test + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=12 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/handwritten/bigquery-storage/.kokoro/test.bat b/handwritten/bigquery-storage/.kokoro/test.bat new file mode 100644 index 00000000000..ae59e59be3e --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/test.bat @@ -0,0 +1,33 @@ +@rem Copyright 2018 Google LLC. All rights reserved. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem http://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. + +@echo "Starting Windows build" + +cd /d %~dp0 +cd .. 
+ +@rem npm path is not currently set in our image, we should fix this next time +@rem we upgrade Node.js in the image: +SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm + +call nvm use v12.14.1 +call which node + +call npm install || goto :error +call npm run test || goto :error + +goto :EOF + +:error +exit /b 1 diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh new file mode 100755 index 00000000000..a5c7ac04cd3 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +export NPM_CONFIG_PREFIX=${HOME}/.npm-global + +cd $(dirname $0)/.. + +npm install +# If tests are running against main branch, configure flakybot +# to open issues on failures: +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then + export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml + export MOCHA_REPORTER=xunit + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi +# Unit tests exercise the entire API surface, which may include +# deprecation warnings: +export MOCHA_THROW_DEPRECATION=false +npm test + +# codecov combines coverage across integration and unit tests. 
Include +# the logic below for any environment you wish to collect coverage for: +COVERAGE_NODE=12 +if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then + NYC_BIN=./node_modules/nyc/bin/nyc.js + if [ -f "$NYC_BIN" ]; then + $NYC_BIN report || true + fi + bash $KOKORO_GFILE_DIR/codecov.sh +else + echo "coverage is only reported for Node $COVERAGE_NODE" +fi diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md new file mode 100644 index 00000000000..eeb46ea09fd --- /dev/null +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -0,0 +1,223 @@ +# Changelog + +## [3.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.1...v3.2.0) (2022-11-11) + + +### Features + +* Add location to WriteStream and add WriteStreamView support ([#295](https://github.com/googleapis/nodejs-bigquery-storage/issues/295)) ([ba3c5ef](https://github.com/googleapis/nodejs-bigquery-storage/commit/ba3c5ef05366b1e9a542b9b13fc0c7a25118b2a3)) + +## [3.1.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.0...v3.1.1) (2022-09-01) + + +### Bug Fixes + +* Allow passing gax instance to client constructor ([#292](https://github.com/googleapis/nodejs-bigquery-storage/issues/292)) ([89f953d](https://github.com/googleapis/nodejs-bigquery-storage/commit/89f953de24d774de076ce9aeef649122ab3d65a6)) +* Do not import the whole google-gax from proto JS ([#1553](https://github.com/googleapis/nodejs-bigquery-storage/issues/1553)) ([#291](https://github.com/googleapis/nodejs-bigquery-storage/issues/291)) ([507e378](https://github.com/googleapis/nodejs-bigquery-storage/commit/507e3780553fa339ffccbba9a8f9ac930d1e9c6d)) + +## [3.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.0.0...v3.1.0) (2022-08-23) + + +### Features + +* allow users to set Apache Avro output format options through avro_serialization_options param in TableReadOptions message 
([#284](https://github.com/googleapis/nodejs-bigquery-storage/issues/284)) ([99b8afc](https://github.com/googleapis/nodejs-bigquery-storage/commit/99b8afc3fb2aa1d47151b90924eab2016432034f)) + + +### Bug Fixes + +* better support for fallback mode ([#287](https://github.com/googleapis/nodejs-bigquery-storage/issues/287)) ([08b0bb2](https://github.com/googleapis/nodejs-bigquery-storage/commit/08b0bb2c300ce49a65121805ea674e9c56726a87)) +* change import long to require ([#289](https://github.com/googleapis/nodejs-bigquery-storage/issues/289)) ([63a3dc2](https://github.com/googleapis/nodejs-bigquery-storage/commit/63a3dc2bcbac775e8c41dd19248ef3cd4829c21f)) +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-bigquery-storage/issues/1546)) ([#290](https://github.com/googleapis/nodejs-bigquery-storage/issues/290)) ([1436388](https://github.com/googleapis/nodejs-bigquery-storage/commit/143638862040327e89c74c87a7018e2342576a95)) + +## [3.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v2.8.0...v3.0.0) (2022-06-29) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#272) + +### Features + +* Deprecate format specific `row_count` field in Read API ([#249](https://github.com/googleapis/nodejs-bigquery-storage/issues/249)) ([fb8acf1](https://github.com/googleapis/nodejs-bigquery-storage/commit/fb8acf1f4eab7823132159bcf5927c9eda6374e2)) + + +### Bug Fixes + +* fixes for dynamic routing and streaming descriptors ([#274](https://github.com/googleapis/nodejs-bigquery-storage/issues/274)) ([4271ea0](https://github.com/googleapis/nodejs-bigquery-storage/commit/4271ea0aaa98286696eb6822d0bef82a655a5811)) +* Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time ([#279](https://github.com/googleapis/nodejs-bigquery-storage/issues/279)) ([849cc23](https://github.com/googleapis/nodejs-bigquery-storage/commit/849cc237081e63a585264a62d49e9407d2f14450)) + + +### Build System + 
+* update library to use Node 12 ([#272](https://github.com/googleapis/nodejs-bigquery-storage/issues/272)) ([5e774e6](https://github.com/googleapis/nodejs-bigquery-storage/commit/5e774e614132f189362d56c502960d87200a11a0)) + +## [2.8.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.7.0...v2.8.0) (2021-12-30) + + +### Features + +* add write_mode support for BigQuery Storage Write API v1 ([#228](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/228)) ([18f3123](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/18f3123275716d49460f77cbbc1a4547412087d2)) + +## [2.7.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.2...v2.7.0) (2021-09-27) + + +### Features + +* add BigQuery Storage Write API v1 ([#209](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/209)) ([e0401d9](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e0401d96480cd192a2fad8075884d2a8abd417ca)) + +### [2.6.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.1...v2.6.2) (2021-09-07) + + +### Bug Fixes + +* **deps:** update dependency snappy to v7 ([#196](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/196)) ([37538ec](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/37538ec42815d0ce325416b4ee299ca3fb7b59fe)) + +### [2.6.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.6.0...v2.6.1) (2021-09-03) + + +### Bug Fixes + +* **build:** migrate to main branch ([#204](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/204)) ([759c9f0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/759c9f0442f9cec7eec94055da87b17ba7ef18ad)) + +## [2.6.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.4...v2.6.0) (2021-08-23) + + +### Features + +* turns on self-signed JWT feature flag ([#200](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/200)) 
([ef2206c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ef2206cf1087c23d144fbc4b50363efb4c6deab2)) + +### [2.5.4](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.3...v2.5.4) (2021-08-17) + + +### Bug Fixes + +* **deps:** google-gax v2.24.1 ([#198](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/198)) ([c6f70de](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/c6f70de43641ee7a00237884cf3f40bbf1bed502)) + +### [2.5.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.2...v2.5.3) (2021-07-21) + + +### Bug Fixes + +* **deps:** google-gax v2.17.1 ([#188](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/188)) ([e49f7ee](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/e49f7ee0413948779842b3b9d4faf5addc4c4db6)) +* Updating WORKSPACE files to use the newest version of the Typescript generator. ([#190](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/190)) ([8649cc6](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/8649cc6ae0a4e6ae807ba9e5af438ca0ffc9592a)) + +### [2.5.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.1...v2.5.2) (2021-06-30) + + +### Bug Fixes + +* **deps:** google-gax v2.17.0 with mTLS ([#185](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/185)) ([1e9b856](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/1e9b8560cb3b60a60035c965ba1dabc24ad8f0c0)) + +### [2.5.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.5.0...v2.5.1) (2021-06-22) + + +### Bug Fixes + +* make request optional in all cases ([#179](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/179)) ([b0beaaa](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b0beaaa280e7599f75e0a439f4ecd4a9a6c059ad)) + +## [2.5.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.4.0...v2.5.0) (2021-06-07) + + +### Features + +* 
Add ZSTD compression as an option for Arrow. ([#165](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/165)) ([dc5a1d0](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/dc5a1d00f92f09dafbf0d3b1a9bf5ea4b5c43103)) + + +### Bug Fixes + +* **deps:** require google-gax v2.12.0 ([#158](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/158)) ([3347edd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/3347edd3781d7a37ae6a50b0d6885365bc2e4b2f)) +* GoogleAdsError missing using generator version after 1.3.0 ([#171](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/171)) ([8504761](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/850476101d06f4c3f903fb10ebb6709c1a6ffa95)) +* use require() to load JSON protos ([#161](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/161)) ([a16129f](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/a16129f7a56882e3070fa79f29b8b6018e7cd651)) + +## [2.4.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.3.0...v2.4.0) (2021-04-20) + + +### Features + +* add a Arrow compression options (Only LZ4 for now). 
([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) +* Return schema on first ReadRowsResponse ([cd23105](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/cd23105fff40a870888ca4a8608d1b85ed6b3639)) + +## [2.3.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.3...v2.3.0) (2021-01-09) + + +### Features + +* introduce style enumeration ([#135](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/135)) ([4a8f699](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/4a8f699472d67aae4300c458308c2fa4fa372592)) + +### [2.2.3](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.2...v2.2.3) (2020-11-25) + + +### Bug Fixes + +* **browser:** check for fetch on window ([d837dfc](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/d837dfc841cf3e77fbc2482dbabb149e2fc4f76a)) + +### [2.2.2](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.1...v2.2.2) (2020-11-07) + + +### Bug Fixes + +* do not modify options object, use defaultScopes ([#126](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/126)) ([6f8eb24](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6f8eb244b1b06a928641550b2390e03964a14981)) + +### [2.2.1](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.2.0...v2.2.1) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#84](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/84)) ([ab36886](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ab36886171cc6d94f66587f715d23e8cd4603f32)) + +## [2.2.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.1.0...v2.2.0) (2020-06-19) + + +### Features + +* promote library to GA ([#75](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/75)) 
([7d7a67e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/7d7a67e9198e87cdcc4911d9505a121f1a1d9549)) + +## [2.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v2.0.0...v2.1.0) (2020-06-12) + + +### Features + +* **secrets:** begin migration to secret manager from keystore ([#70](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/70)) ([6513e8c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6513e8cf6195740b570b39fb645d8a1adafc0580)) + + +### Bug Fixes + +* handle fallback option properly ([#73](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/73)) ([ec6b88c](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/ec6b88cf87bf45e0f16935b8b27f15447aa385b9)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.1.0...v2.0.0) (2020-05-18) + + +### ⚠ BREAKING CHANGES + +* The library now supports Node.js v10+. The last version to support Node.js v8 is tagged legacy-8 on NPM. 
+ +### Features + +* add V1 client ([#28](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/28)) ([da10a33](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/da10a33ee30a6fa0b447ef16c8b755e3ac05a87c)) +* additional type annotation ([#64](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/64)) ([2d76c0e](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2d76c0e16abedfaf106db063dc00f79e38166dad)) +* drop node8 support ([#39](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/39)) ([2f66ded](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/2f66ded8db03f71d3f2b37a1d91e4f3f232d5eaf)) + + +### Bug Fixes + +* regen protos and tests ([#63](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/63)) ([6293832](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/6293832961eedcdd57c24edc311f2c154781e34e)) +* remove eslint, update gax, fix generated protos, run the generator ([#49](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/49)) ([b5b9492](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/b5b9492a0c4b86b868a2b33c5c350301db29cc65)) + +## [1.1.0](https://www.github.com/googleapis/nodejs-bigquery-storage/compare/v1.0.0...v1.1.0) (2020-03-06) + + +### Features + +* deferred client initialization ([#23](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/23)) ([4741719](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/474171928bbdd5d0fb2eab7be868317f88cd18eb)) + +## 1.0.0 (2020-02-29) + + +### ⚠ BREAKING CHANGES + +* initial generation of library (#1) + +### Features + +* export protos in src/index.ts ([68b922a](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/68b922a4c242a6ad2e360758ef0658ca8451b62f)) +* initial generation of library ([#1](https://www.github.com/googleapis/nodejs-bigquery-storage/issues/1)) 
([bd42fbd](https://www.github.com/googleapis/nodejs-bigquery-storage/commit/bd42fbd45616adaf36cdf197d2b0f3c811025e39)) diff --git a/handwritten/bigquery-storage/CODE_OF_CONDUCT.md b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..2add2547a81 --- /dev/null +++ b/handwritten/bigquery-storage/CODE_OF_CONDUCT.md @@ -0,0 +1,94 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. 
They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/handwritten/bigquery-storage/CONTRIBUTING.md b/handwritten/bigquery-storage/CONTRIBUTING.md new file mode 100644 index 00000000000..3281e44c984 --- /dev/null +++ b/handwritten/bigquery-storage/CONTRIBUTING.md @@ -0,0 +1,76 @@ +# How to become a contributor and submit your own code + +**Table of contents** + +* [Contributor License Agreements](#contributor-license-agreements) +* [Contributing a patch](#contributing-a-patch) +* [Running the tests](#running-the-tests) +* [Releasing the library](#releasing-the-library) + +## Contributor License Agreements + +We'd love to accept your sample apps and patches! Before we can take them, we +have to jump a couple of legal hurdles. + +Please fill out either the individual or corporate Contributor License Agreement +(CLA). 
+ + * If you are an individual writing original source code and you're sure you + own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). + * If you work for a company that wants to allow you to contribute your work, + then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). + +Follow either of the two links above to access the appropriate CLA and +instructions for how to sign and return it. Once we receive it, we'll be able to +accept your pull requests. + +## Contributing A Patch + +1. Submit an issue describing your proposed change to the repo in question. +1. The repo owner will respond to your issue promptly. +1. If your proposed change is accepted, and you haven't already done so, sign a + Contributor License Agreement (see details above). +1. Fork the desired repo, develop and test your code changes. +1. Ensure that your code adheres to the existing style in the code to which + you are contributing. +1. Ensure that your code has an appropriate set of tests which all pass. +1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. +1. Submit a pull request. + +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable billing for your project][billing]. +1. [Enable the Google BigQuery Storage API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + + +## Running the tests + +1. [Prepare your environment for Node.js setup][setup]. + +1. Install dependencies: + + npm install + +1. Run the tests: + + # Run unit tests. + npm test + + # Run sample integration tests. + npm run samples-test + + # Run all system tests. + npm run system-test + +1. 
Lint (and maybe fix) any changes: + + npm run fix + +[setup]: https://cloud.google.com/nodejs/docs/setup +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started \ No newline at end of file diff --git a/handwritten/bigquery-storage/LICENSE b/handwritten/bigquery-storage/LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/handwritten/bigquery-storage/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 94c64d45735..a83b819122b 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -1,5 +1,3 @@ -**_THIS REPOSITORY IS DEPRECATED. ALL OF ITS CONTENT AND HISTORY HAS BEEN MOVED TO [GOOGLE-CLOUD-NODE](https://github.com/googleapis/google-cloud-node/tree/main/packages/google-cloud-bigquery-storage)_** - [//]: # "This README.md file is auto-generated, all changes to this file will be lost." [//]: # "To regenerate it, use `python -m synthtool`." 
Google Cloud Platform logo diff --git a/handwritten/bigquery-storage/linkinator.config.json b/handwritten/bigquery-storage/linkinator.config.json new file mode 100644 index 00000000000..befd23c8633 --- /dev/null +++ b/handwritten/bigquery-storage/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/handwritten/bigquery-storage/owlbot.py b/handwritten/bigquery-storage/owlbot.py index 49fce7a6459..44b15e78f18 100644 --- a/handwritten/bigquery-storage/owlbot.py +++ b/handwritten/bigquery-storage/owlbot.py @@ -1,17 +1,20 @@ -# Copyright 2022 Google LLC +# Copyright 2020 Google LLC # -# Licensed under the Apache License, Version 2.0 (the License); +# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+"""This script is used to synthesize generated parts of this library.""" import synthtool.languages.node as node -node.owlbot_main(templates_excludes=[ -'README.md' -]) + +node.owlbot_main( + staging_excludes=['package.json', 'README.md', 'src/index.ts'], + templates_excludes=['src/index.ts'] +) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json new file mode 100644 index 00000000000..2d6e2462757 --- /dev/null +++ b/handwritten/bigquery-storage/package.json @@ -0,0 +1,54 @@ +{ + "name": "@google-cloud/bigquery-storage", + "version": "3.2.0", + "description": "Client for the BigQuery Storage API", + "repository": "googleapis/nodejs-bigquery-storage", + "license": "Apache-2.0", + "author": "Google LLC", + "files": [ + "build/src", + "build/protos" + ], + "main": "build/src/index.js", + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "prelint": "cd samples; npm link ../; npm install", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "precompile": "gts clean" + }, + "dependencies": { + "google-gax": "^3.5.2" + }, + "devDependencies": { + "@types/mocha": "^9.0.0", + "@types/node": "^18.0.0", + "@types/sinon": "^10.0.0", + "c8": "^7.1.0", + "gts": "^3.1.0", + "jsdoc": "^4.0.0", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "null-loader": "^4.0.0", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^9.0.0", + "typescript": "^4.8.3", + "webpack": "^5.0.0", + "webpack-cli": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } +} diff --git 
a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto new file mode 100644 index 00000000000..1627fd12a0c --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +extend google.protobuf.FieldOptions { + // Setting the column_name extension allows users to reference + // bigquery column independently of the field name in the protocol buffer + // message. + // + // The intended use of this annotation is to reference a destination column + // named using characters unavailable for protobuf field names (e.g. unicode + // characters). + // + // More details about BigQuery naming limitations can be found here: + // https://cloud.google.com/bigquery/docs/schemas#column_names + // + // This extension is currently experimental. + optional string column_name = 454943157; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto new file mode 100644 index 00000000000..6d3f6080bf6 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -0,0 +1,64 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "ArrowProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Arrow schema as specified in +// https://arrow.apache.org/docs/python/api/datatypes.html +// and serialized to bytes using IPC: +// https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc +// +// See code samples on how this message can be deserialized. +message ArrowSchema { + // IPC serialized Arrow schema. + bytes serialized_schema = 1; +} + +// Arrow RecordBatch. +message ArrowRecordBatch { + // IPC-serialized Arrow RecordBatch. + bytes serialized_record_batch = 1; + + // [Deprecated] The count of rows in `serialized_record_batch`. + // Please use the format-independent ReadRowsResponse.row_count instead. + int64 row_count = 2 [deprecated = true]; +} + +// Contains options specific to Arrow Serialization. +message ArrowSerializationOptions { + // Compression codec's supported by Arrow. + enum CompressionCodec { + // If unspecified no compression will be used. + COMPRESSION_UNSPECIFIED = 0; + + // LZ4 Frame (https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md) + LZ4_FRAME = 1; + + // Zstandard compression. 
+ ZSTD = 2; + } + + // The compression codec to use for Arrow buffers in serialized record + // batches. + CompressionCodec buffer_compression = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto new file mode 100644 index 00000000000..e1ecb667b61 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -0,0 +1,56 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "AvroProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Avro schema. +message AvroSchema { + // Json serialized schema, as described at + // https://avro.apache.org/docs/1.8.1/spec.html. + string schema = 1; +} + +// Avro rows. +message AvroRows { + // Binary serialized rows in a block. + bytes serialized_binary_rows = 1; + + // [Deprecated] The count of rows in the returning block. + // Please use the format-independent ReadRowsResponse.row_count instead. 
+ int64 row_count = 2 [deprecated = true]; +} + +// Contains options specific to Avro Serialization. +message AvroSerializationOptions { + // Enable displayName attribute in Avro schema. + // + // The Avro specification requires field names to be alphanumeric. By + // default, in cases when column names do not conform to these requirements + // (e.g. non-ascii unicode codepoints) and Avro is requested as an output + // format, the CreateReadSession call will fail. + // + // Setting this field to true, populates avro field names with a placeholder + // value and populates a "displayName" attribute for every avro field with the + // original column name. + bool enable_display_name_attribute = 1; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto new file mode 100644 index 00000000000..b3754acf7b3 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -0,0 +1,48 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "ProtoBufProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// ProtoSchema describes the schema of the serialized protocol buffer data rows. +message ProtoSchema { + // Descriptor for input message. The provided descriptor must be self + // contained, such that data rows sent can be fully decoded using only the + // single descriptor. For data rows that are compositions of multiple + // independent messages, this means the descriptor may need to be transformed + // to only use nested types: + // https://developers.google.com/protocol-buffers/docs/proto#nested + // + // For additional information for how proto types and values map onto BigQuery + // see: https://cloud.google.com/bigquery/docs/write-api#data_type_conversions + google.protobuf.DescriptorProto proto_descriptor = 1; +} + +message ProtoRows { + // A sequence of rows serialized as a Protocol Buffer. + // + // See https://developers.google.com/protocol-buffers/docs/overview for more + // information on deserializing this field. + repeated bytes serialized_rows = 1; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto new file mode 100644 index 00000000000..df602135b9d --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -0,0 +1,666 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1/arrow.proto"; +import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/cloud/bigquery/storage/v1/protobuf.proto"; +import "google/cloud/bigquery/storage/v1/stream.proto"; +import "google/cloud/bigquery/storage/v1/table.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; +import "google/rpc/status.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "StorageProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; +option (google.api.resource_definition) = { + type: "bigquery.googleapis.com/Table" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}" +}; + +// BigQuery Read API. +// +// The Read API can be used to read data from BigQuery. +service BigQueryRead { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a new read session. 
A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. + // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Data is assigned to each stream such that roughly the same number of + // rows can be read from each stream. Because the server-side unit for + // assigning data is collections of rows, the API does not guarantee that + // each stream will return the same number or rows. Additionally, the + // limits are enforced based on the number of pre-filtered rows, so some + // filters can lead to lopsided assignments. + // + // Read sessions automatically expire 6 hours after they are created and do + // not require manual clean-up by the caller. + rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { + option (google.api.http) = { + post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" + body: "*" + }; + option (google.api.method_signature) = "parent,read_session,max_stream_count"; + } + + // Reads rows from the stream in the format prescribed by the ReadSession. + // Each response contains one or more table rows, up to a maximum of 100 MiB + // per response; read requests which attempt to read individual rows larger + // than 100 MiB will fail. + // + // Each request also returns a set of stream statistics reflecting the current + // state of the stream. 
+ rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { + get: "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" + }; + option (google.api.method_signature) = "read_stream,offset"; + } + + // Splits a given `ReadStream` into two `ReadStream` objects. These + // `ReadStream` objects are referred to as the primary and the residual + // streams of the split. The original `ReadStream` can still be read from in + // the same manner as before. Both of the returned `ReadStream` objects can + // also be read from, and the rows returned by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back-to-back in the + // original `ReadStream`. Concretely, it is guaranteed that for streams + // original, primary, and residual, that original[0-j] = primary[0-j] and + // original[j-n] = residual[0-m] once the streams have been read to + // completion. + rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + }; + } +} + +// BigQuery Write API. +// +// The Write API can be used to write data to BigQuery. +// +// For supplementary information about the Write API, see: +// https://cloud.google.com/bigquery/docs/write-api +service BigQueryWrite { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.insertdata," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a write stream to the given table. + // Additionally, every table has a special stream named '_default' + // to which data can be written. This stream doesn't need to be created using + // CreateWriteStream. It is a stream that can be used simultaneously by any + // number of clients. 
Data written to this stream is considered committed as + // soon as an acknowledgement is received. + rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/datasets/*/tables/*}" + body: "write_stream" + }; + option (google.api.method_signature) = "parent,write_stream"; + } + + // Appends data to the given stream. + // + // If `offset` is specified, the `offset` is checked against the end of + // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an + // attempt is made to append to an offset beyond the current end of the stream + // or `ALREADY_EXISTS` if user provides an `offset` that has already been + // written to. User can retry with adjusted offset within the same RPC + // connection. If `offset` is not specified, append happens at the end of the + // stream. + // + // The response contains an optional offset at which the append + // happened. No offset information will be returned for appends to a + // default stream. + // + // Responses are received in the same order in which requests are sent. + // There will be one response for each successful inserted request. Responses + // may optionally embed error information if the originating AppendRequest was + // not successfully processed. + // + // The specifics of when successfully appended data is made visible to the + // table are governed by the type of stream: + // + // * For COMMITTED streams (which includes the default stream), data is + // visible immediately upon successful append. + // + // * For BUFFERED streams, data is made visible via a subsequent `FlushRows` + // rpc which advances a cursor to a newer offset in the stream. + // + // * For PENDING streams, data is not made visible until the stream itself is + // finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly + // committed via the `BatchCommitWriteStreams` rpc. 
+ rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) { + option (google.api.http) = { + post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "write_stream"; + } + + // Gets information about a write stream. + rpc GetWriteStream(GetWriteStreamRequest) returns (WriteStream) { + option (google.api.http) = { + post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Finalize a write stream so that no new data can be appended to the + // stream. Finalize is not supported on the '_default' stream. + rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) { + option (google.api.http) = { + post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Atomically commits a group of `PENDING` streams that belong to the same + // `parent` table. + // + // Streams must be finalized before commit and cannot be committed multiple + // times. Once a stream is committed, data in the stream becomes available + // for read operations. + rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/datasets/*/tables/*}" + }; + option (google.api.method_signature) = "parent"; + } + + // Flushes rows to a BUFFERED stream. + // + // If users are appending rows to BUFFERED stream, flush operation is + // required in order for the rows to become available for reading. A + // Flush operation flushes up to any previously flushed offset in a BUFFERED + // stream, to the offset specified in the request. + // + // Flush is not supported on the _default stream, since it is not BUFFERED. 
+ rpc FlushRows(FlushRowsRequest) returns (FlushRowsResponse) { + option (google.api.http) = { + post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "write_stream"; + } +} + +// Request message for `CreateReadSession`. +message CreateReadSessionRequest { + // Required. The request project that owns the session, in the form of + // `projects/{project_id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. Session to be created. + ReadSession read_session = 2 [(google.api.field_behavior) = REQUIRED]; + + // Max initial number of streams. If unset or zero, the server will + // provide a value of streams so as to produce reasonable throughput. Must be + // non-negative. The number of streams may be lower than the requested number, + // depending on the amount parallelism that is reasonable for the table. + // There is a default system max limit of 1,000. + // + // This must be greater than or equal to preferred_min_stream_count. + // Typically, clients should either leave this unset to let the system to + // determine an upper bound OR set this a size for the maximum "units of work" + // it can gracefully handle. + int32 max_stream_count = 3; + + // The minimum preferred stream count. This parameter can be used to inform + // the service that there is a desired lower bound on the number of streams. + // This is typically a target parallelism of the client (e.g. a Spark + // cluster with N-workers would set this to a low multiple of N to ensure + // good cluster utilization). + // + // The system will make a best effort to provide at least this number of + // streams, but in some cases might provide less. + int32 preferred_min_stream_count = 4; +} + +// Request message for `ReadRows`. +message ReadRowsRequest { + // Required. Stream to read rows from. 
+ string read_stream = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/ReadStream" + } + ]; + + // The offset requested must be less than the last row read from Read. + // Requesting a larger offset is undefined. If not specified, start reading + // from offset zero. + int64 offset = 2; +} + +// Information on if the current connection is being throttled. +message ThrottleState { + // How much this connection is being throttled. Zero means no throttling, + // 100 means fully throttled. + int32 throttle_percent = 1; +} + +// Estimated stream statistics for a given read Stream. +message StreamStats { + message Progress { + // The fraction of rows assigned to the stream that have been processed by + // the server so far, not including the rows in the current response + // message. + // + // This value, along with `at_response_end`, can be used to interpolate + // the progress made as the rows in the message are being processed using + // the following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the + // `at_response_start` value of the current response. + double at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the + // rows in the current response. + double at_response_end = 2; + } + + // Represents the progress of the current stream. + Progress progress = 2; +} + +// Response from calling `ReadRows` may include row data, progress and +// throttling information. +message ReadRowsResponse { + // Row data is returned in format specified during session creation. + oneof rows { + // Serialized row data in AVRO format. + AvroRows avro_rows = 3; + + // Serialized row data in Arrow RecordBatch format. 
+ ArrowRecordBatch arrow_record_batch = 4; + } + + // Number of serialized rows in the rows block. + int64 row_count = 6; + + // Statistics for the stream. + StreamStats stats = 2; + + // Throttling state. If unset, the latest response still describes + // the current throttling status. + ThrottleState throttle_state = 5; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. This schema is equivalent to the one returned by + // CreateSession. This field is only populated in the first ReadRowsResponse + // RPC. + oneof schema { + // Output only. Avro schema. + AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Arrow schema. + ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + } +} + +// Request message for `SplitReadStream`. +message SplitReadStreamRequest { + // Required. Name of the stream to split. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/ReadStream" + } + ]; + + // A value in the range (0.0, 1.0) that specifies the fractional point at + // which the original stream should be split. The actual split point is + // evaluated on pre-filtered rows, so if a filter is provided, then there is + // no guarantee that the division of the rows between the new child streams + // will be proportional to this fractional value. Additionally, because the + // server-side unit for assigning data is collections of rows, this fraction + // will always map to a data storage boundary on the server side. + double fraction = 2; +} + +// Response message for `SplitReadStream`. +message SplitReadStreamResponse { + // Primary stream, which contains the beginning portion of + // |original_stream|. An empty value indicates that the original stream can no + // longer be split. 
+ ReadStream primary_stream = 1; + + // Remainder stream, which contains the tail of |original_stream|. An empty + // value indicates that the original stream can no longer be split. + ReadStream remainder_stream = 2; +} + +// Request message for `CreateWriteStream`. +message CreateWriteStreamRequest { + // Required. Reference to the table to which the stream belongs, in the format + // of `projects/{project}/datasets/{dataset}/tables/{table}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } + ]; + + // Required. Stream to be created. + WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for `AppendRows`. +// +// Due to the nature of AppendRows being a bidirectional streaming RPC, certain +// parts of the AppendRowsRequest need only be specified for the first request +// sent each time the gRPC network connection is opened/reopened. +// +// The size of a single AppendRowsRequest must be less than 10 MB in size. +// Requests larger than this return an error, typically `INVALID_ARGUMENT`. +message AppendRowsRequest { + // ProtoData contains the data rows and schema when constructing append + // requests. + message ProtoData { + // Proto schema used to serialize the data. This value only needs to be + // provided as part of the first request on a gRPC network connection, + // and will be ignored for subsequent requests on the connection. + ProtoSchema writer_schema = 1; + + // Serialized row data in protobuf message format. + // Currently, the backend expects the serialized rows to adhere to + // proto2 semantics when appending rows, particularly with respect to + // how default values are encoded. + ProtoRows rows = 2; + } + + // Required. The write_stream identifies the target of the append operation, and only + // needs to be specified as part of the first request on the gRPC connection. 
+ // If provided for subsequent requests, it must match the value of the first + // request. + // + // For explicitly created write streams, the format is: + // + // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` + // + // For the special default stream, the format is: + // + // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. + string write_stream = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; + + // If present, the write is only performed if the next append offset is same + // as the provided value. If not present, the write is performed at the + // current end of stream. Specifying a value for this field is not allowed + // when calling AppendRows for the '_default' stream. + google.protobuf.Int64Value offset = 2; + + // Input rows. The `writer_schema` field must be specified at the initial + // request and currently, it will be ignored if specified in following + // requests. Following requests must have data in the same format as the + // initial request. + oneof rows { + // Rows in proto format. + ProtoData proto_rows = 4; + } + + // Id set by client to annotate its identity. Only initial request setting is + // respected. + string trace_id = 6; +} + +// Response message for `AppendRows`. +message AppendRowsResponse { + // AppendResult is returned for successful append requests. + message AppendResult { + // The row offset at which the last append occurred. The offset will not be + // set if appending using default streams. + google.protobuf.Int64Value offset = 1; + } + + oneof response { + // Result if the append is successful. + AppendResult append_result = 1; + + // Error returned when problems were encountered. If present, + // it indicates rows were not accepted into the system. + // Users can retry or continue with other append requests within the + // same connection. 
+ // + // Additional information about error signalling: + // + // ALREADY_EXISTS: Happens when an append specified an offset, and the + // backend already has received data at this offset. Typically encountered + // in retry scenarios, and can be ignored. + // + // OUT_OF_RANGE: Returned when the specified offset in the stream is beyond + // the current end of the stream. + // + // INVALID_ARGUMENT: Indicates a malformed request or data. + // + // ABORTED: Request processing is aborted because of prior failures. The + // request can be retried if previous failure is addressed. + // + // INTERNAL: Indicates server side error(s) that can be retried. + google.rpc.Status error = 2; + } + + // If backend detects a schema update, pass it to user so that user can + // use it to input new type of message. It will be empty when no schema + // updates have occurred. + TableSchema updated_schema = 3; + + // If a request failed due to corrupted rows, no rows in the batch will be + // appended. The API will return row level error info, so that the caller can + // remove the bad rows and retry the request. + repeated RowError row_errors = 4; + + // The target of the append operation. Matches the write_stream in the + // corresponding request. + string write_stream = 5; +} + +// Request message for `GetWriteStreamRequest`. +message GetWriteStreamRequest { + // Required. Name of the stream to get, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; + + // Indicates whether to get full or partial view of the WriteStream. If + // not set, view returned will be basic. + WriteStreamView view = 3; +} + +// Request message for `BatchCommitWriteStreams`. +message BatchCommitWriteStreamsRequest { + // Required. 
Parent table that all the streams should belong to, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } + ]; + + // Required. The group of streams that will be committed atomically. + repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Response message for `BatchCommitWriteStreams`. +message BatchCommitWriteStreamsResponse { + // The time at which streams were committed in microseconds granularity. + // This field will only exist when there are no stream errors. + // **Note** if this field is not set, it means the commit was not successful. + google.protobuf.Timestamp commit_time = 1; + + // Stream level error if commit failed. Only streams with error will be in + // the list. + // If empty, there is no error and all streams are committed successfully. + // If non empty, certain streams have errors and ZERO stream is committed due + // to atomicity guarantee. + repeated StorageError stream_errors = 2; +} + +// Request message for invoking `FinalizeWriteStream`. +message FinalizeWriteStreamRequest { + // Required. Name of the stream to finalize, in the form of + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerystorage.googleapis.com/WriteStream" + } + ]; +} + +// Response message for `FinalizeWriteStream`. +message FinalizeWriteStreamResponse { + // Number of rows in the finalized stream. + int64 row_count = 1; +} + +// Request message for `FlushRows`. +message FlushRowsRequest { + // Required. The stream that is the target of the flush operation. 
+  string write_stream = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "bigquerystorage.googleapis.com/WriteStream"
+    }
+  ];
+
+  // Ending offset of the flush operation. Rows before this offset (including
+  // this offset) will be flushed.
+  google.protobuf.Int64Value offset = 2;
+}
+
+// Response message for `FlushRows`.
+message FlushRowsResponse {
+  // The rows before this offset (including this offset) are flushed.
+  int64 offset = 1;
+}
+
+// Structured custom BigQuery Storage error message. The error can be attached
+// as error details in the returned rpc Status. In particular, the use of error
+// codes allows more structured error handling, and reduces the need to evaluate
+// unstructured error text strings.
+message StorageError {
+  // Error code for `StorageError`.
+  enum StorageErrorCode {
+    // Default error.
+    STORAGE_ERROR_CODE_UNSPECIFIED = 0;
+
+    // Table is not found in the system.
+    TABLE_NOT_FOUND = 1;
+
+    // Stream is already committed.
+    STREAM_ALREADY_COMMITTED = 2;
+
+    // Stream is not found.
+    STREAM_NOT_FOUND = 3;
+
+    // Invalid Stream type.
+    // For example, you try to commit a stream that is not pending.
+    INVALID_STREAM_TYPE = 4;
+
+    // Invalid Stream state.
+    // For example, you try to commit a stream that is not finalized or has
+    // been garbage collected.
+    INVALID_STREAM_STATE = 5;
+
+    // Stream is finalized.
+    STREAM_FINALIZED = 6;
+
+    // There is a schema mismatch and it is caused by the user schema having
+    // extra fields compared to the BigQuery schema.
+    SCHEMA_MISMATCH_EXTRA_FIELDS = 7;
+
+    // Offset already exists.
+    OFFSET_ALREADY_EXISTS = 8;
+
+    // Offset out of range.
+    OFFSET_OUT_OF_RANGE = 9;
+  }
+
+  // BigQuery Storage specific error code.
+  StorageErrorCode code = 1;
+
+  // Name of the failed entity.
+  string entity = 2;
+
+  // Message that describes the error.
+  string error_message = 3;
+}
+
+// The message that presents row level error info in a request.
+message RowError {
+  // Error code for `RowError`.
+ enum RowErrorCode { + // Default error. + ROW_ERROR_CODE_UNSPECIFIED = 0; + + // One or more fields in the row has errors. + FIELDS_ERROR = 1; + } + + // Index of the malformed row in the request. + int64 index = 1; + + // Structured error reason for a row error. + RowErrorCode code = 2; + + // Description of the issue encountered when processing the row. + string message = 3; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto new file mode 100644 index 00000000000..fe71adfa6b7 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -0,0 +1,286 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1/arrow.proto"; +import "google/cloud/bigquery/storage/v1/avro.proto"; +import "google/cloud/bigquery/storage/v1/table.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "StreamProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Data format for input or output data. +enum DataFormat { + // Data format is unspecified. + DATA_FORMAT_UNSPECIFIED = 0; + + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. + AVRO = 1; + + // Arrow is a standard open source column-based message format. + // See https://arrow.apache.org/ for more details. + ARROW = 2; +} + +// Information about the ReadSession. +message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + + // Additional attributes when reading a table. + message TableModifiers { + // The snapshot time of the table. If not set, interpreted as now. + google.protobuf.Timestamp snapshot_time = 1; + } + + // Options dictating how we read a table. + message TableReadOptions { + // Optional. The names of the fields in the table to be returned. If no + // field names are specified, then all fields in the table are returned. + // + // Nested fields -- the child elements of a STRUCT field -- can be selected + // individually using their fully-qualified names, and will be returned as + // record fields containing only the selected nested fields. 
If a STRUCT
+    // field is specified in the selected fields list, all of the child elements
+    // will be returned.
+    //
+    // As an example, consider a table with the following schema:
+    //
+    // {
+    //     "name": "struct_field",
+    //     "type": "RECORD",
+    //     "mode": "NULLABLE",
+    //     "fields": [
+    //         {
+    //             "name": "string_field1",
+    //             "type": "STRING",
+    //             "mode": "NULLABLE"
+    //         },
+    //         {
+    //             "name": "string_field2",
+    //             "type": "STRING",
+    //             "mode": "NULLABLE"
+    //         }
+    //     ]
+    // }
+    //
+    // Specifying "struct_field" in the selected fields list will result in a
+    // read session schema with the following logical structure:
+    //
+    // struct_field {
+    //     string_field1
+    //     string_field2
+    // }
+    //
+    // Specifying "struct_field.string_field1" in the selected fields list will
+    // result in a read session schema with the following logical structure:
+    //
+    // struct_field {
+    //     string_field1
+    // }
+    //
+    // The order of the fields in the read session schema is derived from the
+    // table schema and does not correspond to the order in which the fields are
+    // specified in this list.
+    repeated string selected_fields = 1;
+
+    // SQL text filtering statement, similar to a WHERE clause in a query.
+    // Aggregates are not supported.
+    //
+    // Examples: "int_field > 5"
+    //           "date_field = CAST('2014-9-27' as DATE)"
+    //           "nullable_field is not NULL"
+    //           "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))"
+    //           "numeric_field BETWEEN 1.0 AND 5.0"
+    //
+    // Restricted to a maximum length of 1 MB.
+    string row_restriction = 2;
+
+    oneof output_format_serialization_options {
+      // Optional. Options specific to the Apache Arrow output format.
+      ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL];
+
+      // Optional. Options specific to the Apache Avro output format.
+      AvroSerializationOptions avro_serialization_options = 4 [(google.api.field_behavior) = OPTIONAL];
+    }
+  }
+
+  // Output only. 
Unique identifier for the session, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. The expire_time is + // automatically assigned and currently cannot be specified or updated. + google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported. + DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. + oneof schema { + // Output only. Avro schema. + AvroSchema avro_schema = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Arrow schema. + ArrowSchema arrow_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + } + + // Immutable. Table that this ReadSession is reading from, in the form + // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` + string table = 6 [ + (google.api.field_behavior) = IMMUTABLE, + (google.api.resource_reference) = { + type: "bigquery.googleapis.com/Table" + } + ]; + + // Optional. Any modifiers which are applied when reading from the specified table. + TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Read options for this session (e.g. column selection, filters). + TableReadOptions read_options = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. A list of streams created with the session. + // + // At least one stream is created with the session. 
In the future, larger + // request_stream_count values *may* result in this list being unpopulated, + // in that case, the user will need to use a List method to get the streams + // instead, which is not yet available. + repeated ReadStream streams = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An estimate on the number of bytes this session will scan when + // all streams are completely consumed. This estimate is based on + // metadata from the table which might be incomplete or stale. + int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. ID set by client to annotate a session identity. This does not need + // to be strictly unique, but instead the same ID should be used to group + // logically connected sessions (e.g. All using the same ID for all sessions + // needed to complete a Spark SQL query is reasonable). + // + // Maximum length is 256 bytes. + string trace_id = 13 [(google.api.field_behavior) = OPTIONAL]; +} + +// Information about a single stream that gets data out of the storage system. +// Most of the information about `ReadStream` instances is aggregated, making +// `ReadStream` lightweight. +message ReadStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadStream" + pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + }; + + // Output only. Name of the stream, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// WriteStreamView is a view enum that controls what details about a write +// stream should be returned. +enum WriteStreamView { + // The default / unset value. + WRITE_STREAM_VIEW_UNSPECIFIED = 0; + + // The BASIC projection returns basic metadata about a write stream. The + // basic view does not include schema information. 
This is the default view + // returned by GetWriteStream. + BASIC = 1; + + // The FULL projection returns all available write stream metadata, including + // the schema. CreateWriteStream returns the full projection of write stream + // metadata. + FULL = 2; +} + +// Information about a single stream that gets data inside the storage system. +message WriteStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/WriteStream" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" + }; + + // Type enum of the stream. + enum Type { + // Unknown type. + TYPE_UNSPECIFIED = 0; + + // Data will commit automatically and appear as soon as the write is + // acknowledged. + COMMITTED = 1; + + // Data is invisible until the stream is committed. + PENDING = 2; + + // Data is only visible up to the offset to which it was flushed. + BUFFERED = 3; + } + + // Mode enum of the stream. + enum WriteMode { + // Unknown type. + WRITE_MODE_UNSPECIFIED = 0; + + // Insert new records into the table. + // It is the default value if customers do not specify it. + INSERT = 1; + } + + // Output only. Name of the stream, in the form + // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Type of the stream. + Type type = 2 [(google.api.field_behavior) = IMMUTABLE]; + + // Output only. Create time of the stream. For the _default stream, this is the + // creation_time of the table. + google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Commit time of the stream. + // If a stream is of `COMMITTED` type, then it will have a commit_time same as + // `create_time`. If the stream is of `PENDING` type, empty commit_time + // means it is not committed. + google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The schema of the destination table. 
It is only returned in + // `CreateWriteStream` response. Caller should generate data that's + // compatible with this schema to send in initial `AppendRowsRequest`. + // The table schema could go out of date during the life time of the stream. + TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Immutable. Mode of the stream. + WriteMode write_mode = 7 [(google.api.field_behavior) = IMMUTABLE]; + + // Immutable. The geographic location where the stream's dataset resides. See + // https://cloud.google.com/bigquery/docs/locations for supported + // locations. + string location = 8 [(google.api.field_behavior) = IMMUTABLE]; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto new file mode 100644 index 00000000000..fa4f840c580 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -0,0 +1,166 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option java_multiple_files = true; +option java_outer_classname = "TableProto"; +option java_package = "com.google.cloud.bigquery.storage.v1"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1"; + +// Schema of a table. This schema is a subset of +// google.cloud.bigquery.v2.TableSchema containing information necessary to +// generate valid message to write to BigQuery. +message TableSchema { + // Describes the fields in a table. + repeated TableFieldSchema fields = 1; +} + +// TableFieldSchema defines a single field/column within a table schema. +message TableFieldSchema { + enum Type { + // Illegal value + TYPE_UNSPECIFIED = 0; + + // 64K, UTF8 + STRING = 1; + + // 64-bit signed + INT64 = 2; + + // 64-bit IEEE floating point + DOUBLE = 3; + + // Aggregate type + STRUCT = 4; + + // 64K, Binary + BYTES = 5; + + // 2-valued + BOOL = 6; + + // 64-bit signed usec since UTC epoch + TIMESTAMP = 7; + + // Civil date - Year, Month, Day + DATE = 8; + + // Civil time - Hour, Minute, Second, Microseconds + TIME = 9; + + // Combination of civil date and civil time + DATETIME = 10; + + // Geography object + GEOGRAPHY = 11; + + // Numeric value + NUMERIC = 12; + + // BigNumeric value + BIGNUMERIC = 13; + + // Interval + INTERVAL = 14; + + // JSON, String + JSON = 15; + } + + enum Mode { + // Illegal value + MODE_UNSPECIFIED = 0; + + NULLABLE = 1; + + REQUIRED = 2; + + REPEATED = 3; + } + + // Required. The field name. The name must contain only letters (a-z, A-Z), + // numbers (0-9), or underscores (_), and must start with a letter or + // underscore. The maximum length is 128 characters. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The field data type. 
+ Type type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The field mode. The default value is NULLABLE. + Mode mode = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Describes the nested schema fields if the type property is set to STRUCT. + repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The field description. The maximum length is 1,024 characters. + string description = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Maximum length of values of this field for STRINGS or BYTES. + // + // If max_length is not specified, no maximum length constraint is imposed + // on this field. + // + // If type = "STRING", then max_length represents the maximum UTF-8 + // length of strings in this field. + // + // If type = "BYTES", then max_length represents the maximum number of + // bytes in this field. + // + // It is invalid to set this field if type is not "STRING" or "BYTES". + int64 max_length = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Precision (maximum number of total digits in base 10) and scale + // (maximum number of digits in the fractional part in base 10) constraints + // for values of this field for NUMERIC or BIGNUMERIC. + // + // It is invalid to set precision or scale if type is not "NUMERIC" or + // "BIGNUMERIC". + // + // If precision and scale are not specified, no value range constraint is + // imposed on this field insofar as values are permitted by the type. + // + // Values of this NUMERIC or BIGNUMERIC field must be in this range when: + // + // * Precision (P) and scale (S) are specified: + // [-10^(P-S) + 10^(-S), 10^(P-S) - 10^(-S)] + // * Precision (P) is specified but not scale (and thus scale is + // interpreted to be equal to zero): + // [-10^P + 1, 10^P - 1]. + // + // Acceptable values for precision and scale if both are specified: + // + // * If type = "NUMERIC": + // 1 <= precision - scale <= 29 and 0 <= scale <= 9. 
+ // * If type = "BIGNUMERIC": + // 1 <= precision - scale <= 38 and 0 <= scale <= 38. + // + // Acceptable values for precision if only precision is specified but not + // scale (and thus scale is interpreted to be equal to zero): + // + // * If type = "NUMERIC": 1 <= precision <= 29. + // * If type = "BIGNUMERIC": 1 <= precision <= 38. + // + // If scale is specified but not precision, then it is invalid. + int64 precision = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. See documentation for precision. + int64 scale = 9 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto new file mode 100644 index 00000000000..f70c61c7246 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -0,0 +1,36 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "ArrowProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Arrow schema. +message ArrowSchema { + // IPC serialized Arrow schema. + bytes serialized_schema = 1; +} + +// Arrow RecordBatch. 
+message ArrowRecordBatch { + // IPC serialized Arrow RecordBatch. + bytes serialized_record_batch = 1; + + // The count of rows in the returning block. + int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto new file mode 100644 index 00000000000..7d034a28a7e --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -0,0 +1,37 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "AvroProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Avro schema. +message AvroSchema { + // Json serialized schema, as described at + // https://avro.apache.org/docs/1.8.1/spec.html + string schema = 1; +} + +// Avro rows. +message AvroRows { + // Binary serialized rows in a block. + bytes serialized_binary_rows = 1; + + // The count of rows in the returning block. 
+ int64 row_count = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto new file mode 100644 index 00000000000..1ff8d8b5eb6 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -0,0 +1,39 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Options dictating how we read a table. +message TableReadOptions { + // Optional. Names of the fields in the table that should be read. If empty, + // all fields will be read. If the specified field is a nested field, all the + // sub-fields in the field will be selected. The output field order is + // unrelated to the order of fields in selected_fields. + repeated string selected_fields = 1; + + // Optional. SQL text filtering statement, similar to a WHERE clause in + // a query. Aggregates are not supported. 
+ // + // Examples: "int_field > 5" + // "date_field = CAST('2014-9-27' as DATE)" + // "nullable_field is not NULL" + // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // "numeric_field BETWEEN 1.0 AND 5.0" + string row_restriction = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto new file mode 100644 index 00000000000..0d311418a49 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -0,0 +1,405 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; +import "google/cloud/bigquery/storage/v1beta1/avro.proto"; +import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; +import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// BigQuery storage API. 
+// +// The BigQuery storage API can be used to read data stored in BigQuery. +service BigQueryStorage { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a new read session. A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. + // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Read sessions automatically expire 24 hours after they are created and do + // not require manual clean-up by the caller. + rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { + option (google.api.http) = { + post: "/v1beta1/{table_reference.project_id=projects/*}" + body: "*" + additional_bindings { + post: "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}" + body: "*" + } + }; + option (google.api.method_signature) = "table_reference,parent,requested_streams"; + } + + // Reads rows from the table in the format prescribed by the read session. + // Each response contains one or more table rows, up to a maximum of 10 MiB + // per response; read requests which attempt to read individual rows larger + // than this will fail. + // + // Each request also returns a set of stream statistics reflecting the + // estimated total number of rows in the read stream. This number is computed + // based on the total table size and the number of active streams in the read + // session, and may change as other streams continue to read data. 
+ rpc ReadRows(ReadRowsRequest) returns (stream ReadRowsResponse) { + option (google.api.http) = { + get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" + }; + option (google.api.method_signature) = "read_position"; + } + + // Creates additional streams for a ReadSession. This API can be used to + // dynamically adjust the parallelism of a batch processing task upwards by + // adding additional workers. + rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) { + option (google.api.http) = { + post: "/v1beta1/{session.name=projects/*/sessions/*}" + body: "*" + }; + option (google.api.method_signature) = "session,requested_streams"; + } + + // Triggers the graceful termination of a single stream in a ReadSession. This + // API can be used to dynamically adjust the parallelism of a batch processing + // task downwards without losing data. + // + // This API does not delete the stream -- it remains visible in the + // ReadSession, and any data processed by the stream is not released to other + // streams. However, no additional data will be assigned to the stream once + // this call completes. Callers must continue reading data on the stream until + // the end of the stream is reached so that data which has already been + // assigned to the stream will be processed. + // + // This method will return an error if there are no other live streams + // in the Session, or if SplitReadStream() has been called on the given + // Stream. + rpc FinalizeStream(FinalizeStreamRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1beta1/{stream.name=projects/*/streams/*}" + body: "*" + }; + option (google.api.method_signature) = "stream"; + } + + // Splits a given read stream into two Streams. These streams are referred to + // as the primary and the residual of the split. The original stream can still + // be read from in the same manner as before. 
Both of the returned streams can + // also be read from, and the total rows return by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back to back in the + // original Stream. Concretely, it is guaranteed that for streams Original, + // Primary, and Residual, that Original[0-j] = Primary[0-j] and + // Original[j-n] = Residual[0-m] once the streams have been read to + // completion. + // + // This method is guaranteed to be idempotent. + rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + option (google.api.http) = { + get: "/v1beta1/{original_stream.name=projects/*/streams/*}" + }; + option (google.api.method_signature) = "original_stream"; + } +} + +// Information about a single data stream within a read session. +message Stream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/Stream" + pattern: "projects/{project}/locations/{location}/streams/{stream}" + }; + + // Name of the stream, in the form + // `projects/{project_id}/locations/{location}/streams/{stream_id}`. + string name = 1; +} + +// Expresses a point within a given stream using an offset position. +message StreamPosition { + // Identifier for a given Stream. + Stream stream = 1; + + // Position in the stream. + int64 offset = 2; +} + +// Information returned from a `CreateReadSession` request. +message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + + // Unique identifier for the session, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}`. + string name = 1; + + // Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. + google.protobuf.Timestamp expire_time = 2; + + // The schema for the read. 
If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. + oneof schema { + // Avro schema. + AvroSchema avro_schema = 5; + + // Arrow schema. + ArrowSchema arrow_schema = 6; + } + + // Streams associated with this session. + repeated Stream streams = 4; + + // Table that this ReadSession is reading from. + TableReference table_reference = 7; + + // Any modifiers which are applied when reading from the specified table. + TableModifiers table_modifiers = 8; + + // The strategy to use for distributing data among the streams. + ShardingStrategy sharding_strategy = 9; +} + +// Creates a new read session, which may include additional options such as +// requested parallelism, projection filters and constraints. +message CreateReadSessionRequest { + // Required. Reference to the table to read. + TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. String of the form `projects/{project_id}` indicating the + // project this ReadSession is associated with. This is the project that will + // be billed for usage. + string parent = 6 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Any modifiers to the Table (e.g. snapshot timestamp). + TableModifiers table_modifiers = 2; + + // Initial number of streams. If unset or 0, we will + // provide a value of streams so as to produce reasonable throughput. Must be + // non-negative. The number of streams may be lower than the requested number, + // depending on the amount parallelism that is reasonable for the table and + // the maximum amount of parallelism allowed by the system. + // + // Streams must be read starting from offset 0. + int32 requested_streams = 3; + + // Read options for this session (e.g. column selection, filters). + TableReadOptions read_options = 4; + + // Data output format. 
Currently default to Avro. + DataFormat format = 5; + + // The strategy to use for distributing data among multiple streams. Currently + // defaults to liquid sharding. + ShardingStrategy sharding_strategy = 7; +} + +// Data format for input or output data. +enum DataFormat { + // Data format is unspecified. + DATA_FORMAT_UNSPECIFIED = 0; + + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. + AVRO = 1; + + ARROW = 3; +} + +// Strategy for distributing data among multiple streams in a read session. +enum ShardingStrategy { + // Same as LIQUID. + SHARDING_STRATEGY_UNSPECIFIED = 0; + + // Assigns data to each stream based on the client's read rate. The faster the + // client reads from a stream, the more data is assigned to the stream. In + // this strategy, it's possible to read all data from a single stream even if + // there are other streams present. + LIQUID = 1; + + // Assigns data to each stream such that roughly the same number of rows can + // be read from each stream. Because the server-side unit for assigning data + // is collections of rows, the API does not guarantee that each stream will + // return the same number or rows. Additionally, the limits are enforced based + // on the number of pre-filtering rows, so some filters can lead to lopsided + // assignments. + BALANCED = 2; +} + +// Requesting row data via `ReadRows` must provide Stream position information. +message ReadRowsRequest { + // Required. Identifier of the position in the stream to start reading from. + // The offset requested must be less than the last row read from ReadRows. + // Requesting a larger offset is undefined. + StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Progress information for a given Stream. +message StreamStatus { + // Number of estimated rows in the current stream. 
May change over time as + // different readers in the stream progress at rates which are relatively fast + // or slow. + int64 estimated_row_count = 1; + + // A value in the range [0.0, 1.0] that represents the fraction of rows + // assigned to this stream that have been processed by the server. In the + // presence of read filters, the server may process more rows than it returns, + // so this value reflects progress through the pre-filtering rows. + // + // This value is only populated for sessions created through the BALANCED + // sharding strategy. + float fraction_consumed = 2; + + // Represents the progress of the current stream. + Progress progress = 4; + + // Whether this stream can be split. For sessions that use the LIQUID sharding + // strategy, this value is always false. For BALANCED sessions, this value is + // false when enough data have been read such that no more splits are possible + // at that point or beyond. For small tables or streams that are the result of + // a chain of splits, this value may never be true. + bool is_splittable = 3; +} + +message Progress { + // The fraction of rows assigned to the stream that have been processed by the + // server so far, not including the rows in the current response message. + // + // This value, along with `at_response_end`, can be used to interpolate the + // progress made as the rows in the message are being processed using the + // following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the `at_response_start` + // value of the current response. + float at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the rows in + // the current response. + float at_response_end = 2; +} + +// Information on if the current connection is being throttled. 
+message ThrottleStatus { + // How much this connection is being throttled. + // 0 is no throttling, 100 is completely throttled. + int32 throttle_percent = 1; +} + +// Response from calling `ReadRows` may include row data, progress and +// throttling information. +message ReadRowsResponse { + // Row data is returned in format specified during session creation. + oneof rows { + // Serialized row data in AVRO format. + AvroRows avro_rows = 3; + + // Serialized row data in Arrow RecordBatch format. + ArrowRecordBatch arrow_record_batch = 4; + } + + // Number of serialized rows in the rows block. This value is recorded here, + // in addition to the row_count values in the output-specific messages in + // `rows`, so that code which needs to record progress through the stream can + // do so in an output format-independent way. + int64 row_count = 6; + + // Estimated stream statistics. + StreamStatus status = 2; + + // Throttling status. If unset, the latest response still describes + // the current throttling status. + ThrottleStatus throttle_status = 5; +} + +// Information needed to request additional streams for an established read +// session. +message BatchCreateReadSessionStreamsRequest { + // Required. Must be a non-expired session obtained from a call to + // CreateReadSession. Only the name field needs to be set. + ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Number of new streams requested. Must be positive. + // Number of added streams may be less than this, see CreateReadSessionRequest + // for more information. + int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The response from `BatchCreateReadSessionStreams` returns the stream +// identifiers for the newly created streams. +message BatchCreateReadSessionStreamsResponse { + // Newly added streams. + repeated Stream streams = 1; +} + +// Request information for invoking `FinalizeStream`. +message FinalizeStreamRequest { + // Required. 
Stream to finalize. + Stream stream = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request information for `SplitReadStream`. +message SplitReadStreamRequest { + // Required. Stream to split. + Stream original_stream = 1 [(google.api.field_behavior) = REQUIRED]; + + // A value in the range (0.0, 1.0) that specifies the fractional point at + // which the original stream should be split. The actual split point is + // evaluated on pre-filtered rows, so if a filter is provided, then there is + // no guarantee that the division of the rows between the new child streams + // will be proportional to this fractional value. Additionally, because the + // server-side unit for assigning data is collections of rows, this fraction + // will always map to to a data storage boundary on the server side. + float fraction = 2; +} + +// Response from `SplitReadStream`. +message SplitReadStreamResponse { + // Primary stream, which contains the beginning portion of + // |original_stream|. An empty value indicates that the original stream can no + // longer be split. + Stream primary_stream = 1; + + // Remainder stream, which contains the tail of |original_stream|. An empty + // value indicates that the original stream can no longer be split. + Stream remainder_stream = 2; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto new file mode 100644 index 00000000000..22c940c0e6b --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -0,0 +1,41 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta1; + +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option java_outer_classname = "TableReferenceProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta1"; + +// Table reference that includes just the 3 strings needed to identify a table. +message TableReference { + // The assigned project ID of the project. + string project_id = 1; + + // The ID of the dataset in the above project. + string dataset_id = 2; + + // The ID of the table in the above dataset. + string table_id = 3; +} + +// All fields in this message optional. +message TableModifiers { + // The snapshot time of the table. If not set, interpreted as now. + google.protobuf.Timestamp snapshot_time = 1; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts new file mode 100644 index 00000000000..7d4eae00205 --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -0,0 +1,11968 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import type {protobuf as $protobuf} from "google-gax"; +import Long = require("long"); +/** Namespace google. */ +export namespace google { + + /** Namespace cloud. */ + namespace cloud { + + /** Namespace bigquery. */ + namespace bigquery { + + /** Namespace storage. */ + namespace storage { + + /** Namespace v1. */ + namespace v1 { + + /** Properties of an ArrowSchema. */ + interface IArrowSchema { + + /** ArrowSchema serializedSchema */ + serializedSchema?: (Uint8Array|string|null); + } + + /** Represents an ArrowSchema. */ + class ArrowSchema implements IArrowSchema { + + /** + * Constructs a new ArrowSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSchema); + + /** ArrowSchema serializedSchema. */ + public serializedSchema: (Uint8Array|string); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSchema): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. 
+ * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Verifies an ArrowSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ArrowSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSchema; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @param message ArrowSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an ArrowRecordBatch. */ + interface IArrowRecordBatch { + + /** ArrowRecordBatch serializedRecordBatch */ + serializedRecordBatch?: (Uint8Array|string|null); + + /** ArrowRecordBatch rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an ArrowRecordBatch. */ + class ArrowRecordBatch implements IArrowRecordBatch { + + /** + * Constructs a new ArrowRecordBatch. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch); + + /** ArrowRecordBatch serializedRecordBatch. */ + public serializedRecordBatch: (Uint8Array|string); + + /** ArrowRecordBatch rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowRecordBatch instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Encodes the specified ArrowRecordBatch message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Verifies an ArrowRecordBatch message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowRecordBatch + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowRecordBatch; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * @param message ArrowRecordBatch + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowRecordBatch to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowRecordBatch + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an ArrowSerializationOptions. */ + interface IArrowSerializationOptions { + + /** ArrowSerializationOptions bufferCompression */ + bufferCompression?: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null); + } + + /** Represents an ArrowSerializationOptions. */ + class ArrowSerializationOptions implements IArrowSerializationOptions { + + /** + * Constructs a new ArrowSerializationOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions); + + /** ArrowSerializationOptions bufferCompression. 
*/ + public bufferCompression: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec); + + /** + * Creates a new ArrowSerializationOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSerializationOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IArrowSerializationOptions): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @param message ArrowSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @param message ArrowSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IArrowSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Verifies an ArrowSerializationOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowSerializationOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ArrowSerializationOptions; + + /** + * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. + * @param message ArrowSerializationOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ArrowSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSerializationOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSerializationOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace ArrowSerializationOptions { + + /** CompressionCodec enum. */ + enum CompressionCodec { + COMPRESSION_UNSPECIFIED = 0, + LZ4_FRAME = 1, + ZSTD = 2 + } + } + + /** Properties of an AvroSchema. */ + interface IAvroSchema { + + /** AvroSchema schema */ + schema?: (string|null); + } + + /** Represents an AvroSchema. */ + class AvroSchema implements IAvroSchema { + + /** + * Constructs a new AvroSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSchema); + + /** AvroSchema schema. */ + public schema: string; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSchema): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. 
+ * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Verifies an AvroSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSchema; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. 
+ * @param message AvroSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an AvroRows. */ + interface IAvroRows { + + /** AvroRows serializedBinaryRows */ + serializedBinaryRows?: (Uint8Array|string|null); + + /** AvroRows rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an AvroRows. */ + class AvroRows implements IAvroRows { + + /** + * Constructs a new AvroRows. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroRows); + + /** AvroRows serializedBinaryRows. */ + public serializedBinaryRows: (Uint8Array|string); + + /** AvroRows rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new AvroRows instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroRows): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroRows message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroRows message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Verifies an AvroRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroRows; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
+ * @param message AvroRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an AvroSerializationOptions. */ + interface IAvroSerializationOptions { + + /** AvroSerializationOptions enableDisplayNameAttribute */ + enableDisplayNameAttribute?: (boolean|null); + } + + /** Represents an AvroSerializationOptions. */ + class AvroSerializationOptions implements IAvroSerializationOptions { + + /** + * Constructs a new AvroSerializationOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions); + + /** AvroSerializationOptions enableDisplayNameAttribute. */ + public enableDisplayNameAttribute: boolean; + + /** + * Creates a new AvroSerializationOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSerializationOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAvroSerializationOptions): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. 
+ * @param message AvroSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. + * @param message AvroSerializationOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAvroSerializationOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Verifies an AvroSerializationOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSerializationOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AvroSerializationOptions; + + /** + * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. + * @param message AvroSerializationOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AvroSerializationOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSerializationOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSerializationOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ProtoSchema. */ + interface IProtoSchema { + + /** ProtoSchema protoDescriptor */ + protoDescriptor?: (google.protobuf.IDescriptorProto|null); + } + + /** Represents a ProtoSchema. */ + class ProtoSchema implements IProtoSchema { + + /** + * Constructs a new ProtoSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IProtoSchema); + + /** ProtoSchema protoDescriptor. */ + public protoDescriptor?: (google.protobuf.IDescriptorProto|null); + + /** + * Creates a new ProtoSchema instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ProtoSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IProtoSchema): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @param message ProtoSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @param message ProtoSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Verifies a ProtoSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ProtoSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoSchema; + + /** + * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. + * @param message ProtoSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ProtoSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ProtoSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ProtoRows. */ + interface IProtoRows { + + /** ProtoRows serializedRows */ + serializedRows?: (Uint8Array[]|null); + } + + /** Represents a ProtoRows. */ + class ProtoRows implements IProtoRows { + + /** + * Constructs a new ProtoRows. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IProtoRows); + + /** ProtoRows serializedRows. 
*/ + public serializedRows: Uint8Array[]; + + /** + * Creates a new ProtoRows instance using the specified properties. + * @param [properties] Properties to set + * @returns ProtoRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IProtoRows): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. + * @param message ProtoRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. + * @param message ProtoRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IProtoRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ProtoRows message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Decodes a ProtoRows message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Verifies a ProtoRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ProtoRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ProtoRows; + + /** + * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. + * @param message ProtoRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ProtoRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ProtoRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Represents a BigQueryRead */ + class BigQueryRead extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryRead service. 
+ * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryRead service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryRead; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadSession + */ + public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback): void; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @returns Promise + */ + public createReadSession(request: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): Promise; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + */ + public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback): void; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @returns Promise + */ + public readRows(request: google.cloud.bigquery.storage.v1.IReadRowsRequest): Promise; + + /** + * Calls SplitReadStream. 
+ * @param request SplitReadStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback): void; + + /** + * Calls SplitReadStream. + * @param request SplitReadStreamRequest message or plain object + * @returns Promise + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): Promise; + } + + namespace BigQueryRead { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. + * @param error Error, if any + * @param [response] ReadSession + */ + type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadSession) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. + * @param error Error, if any + * @param [response] ReadRowsResponse + */ + type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.ReadRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. + * @param error Error, if any + * @param [response] SplitReadStreamResponse + */ + type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.SplitReadStreamResponse) => void; + } + + /** Represents a BigQueryWrite */ + class BigQueryWrite extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryWrite service. 
+ * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryWrite service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryWrite; + + /** + * Calls CreateWriteStream. + * @param request CreateWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and WriteStream + */ + public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback): void; + + /** + * Calls CreateWriteStream. + * @param request CreateWriteStreamRequest message or plain object + * @returns Promise + */ + public createWriteStream(request: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): Promise; + + /** + * Calls AppendRows. + * @param request AppendRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and AppendRowsResponse + */ + public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback): void; + + /** + * Calls AppendRows. + * @param request AppendRowsRequest message or plain object + * @returns Promise + */ + public appendRows(request: google.cloud.bigquery.storage.v1.IAppendRowsRequest): Promise; + + /** + * Calls GetWriteStream. 
+ * @param request GetWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and WriteStream + */ + public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback): void; + + /** + * Calls GetWriteStream. + * @param request GetWriteStreamRequest message or plain object + * @returns Promise + */ + public getWriteStream(request: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): Promise; + + /** + * Calls FinalizeWriteStream. + * @param request FinalizeWriteStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse + */ + public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback): void; + + /** + * Calls FinalizeWriteStream. + * @param request FinalizeWriteStreamRequest message or plain object + * @returns Promise + */ + public finalizeWriteStream(request: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): Promise; + + /** + * Calls BatchCommitWriteStreams. + * @param request BatchCommitWriteStreamsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse + */ + public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback): void; + + /** + * Calls BatchCommitWriteStreams. + * @param request BatchCommitWriteStreamsRequest message or plain object + * @returns Promise + */ + public batchCommitWriteStreams(request: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): Promise; + + /** + * Calls FlushRows. 
+ * @param request FlushRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and FlushRowsResponse + */ + public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest, callback: google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback): void; + + /** + * Calls FlushRows. + * @param request FlushRowsRequest message or plain object + * @returns Promise + */ + public flushRows(request: google.cloud.bigquery.storage.v1.IFlushRowsRequest): Promise; + } + + namespace BigQueryWrite { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. + * @param error Error, if any + * @param [response] WriteStream + */ + type CreateWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. + * @param error Error, if any + * @param [response] AppendRowsResponse + */ + type AppendRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.AppendRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. + * @param error Error, if any + * @param [response] WriteStream + */ + type GetWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.WriteStream) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. + * @param error Error, if any + * @param [response] FinalizeWriteStreamResponse + */ + type FinalizeWriteStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. 
+ * @param error Error, if any + * @param [response] BatchCommitWriteStreamsResponse + */ + type BatchCommitWriteStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. + * @param error Error, if any + * @param [response] FlushRowsResponse + */ + type FlushRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1.FlushRowsResponse) => void; + } + + /** Properties of a CreateReadSessionRequest. */ + interface ICreateReadSessionRequest { + + /** CreateReadSessionRequest parent */ + parent?: (string|null); + + /** CreateReadSessionRequest readSession */ + readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); + + /** CreateReadSessionRequest maxStreamCount */ + maxStreamCount?: (number|null); + + /** CreateReadSessionRequest preferredMinStreamCount */ + preferredMinStreamCount?: (number|null); + } + + /** Represents a CreateReadSessionRequest. */ + class CreateReadSessionRequest implements ICreateReadSessionRequest { + + /** + * Constructs a new CreateReadSessionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest); + + /** CreateReadSessionRequest parent. */ + public parent: string; + + /** CreateReadSessionRequest readSession. */ + public readSession?: (google.cloud.bigquery.storage.v1.IReadSession|null); + + /** CreateReadSessionRequest maxStreamCount. */ + public maxStreamCount: number; + + /** CreateReadSessionRequest preferredMinStreamCount. */ + public preferredMinStreamCount: number; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns CreateReadSessionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Verifies a CreateReadSessionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateReadSessionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateReadSessionRequest; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @param message CreateReadSessionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateReadSessionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReadRowsRequest. */ + interface IReadRowsRequest { + + /** ReadRowsRequest readStream */ + readStream?: (string|null); + + /** ReadRowsRequest offset */ + offset?: (number|Long|string|null); + } + + /** Represents a ReadRowsRequest. 
*/ + class ReadRowsRequest implements IReadRowsRequest { + + /** + * Constructs a new ReadRowsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest); + + /** ReadRowsRequest readStream. */ + public readStream: string; + + /** ReadRowsRequest offset. */ + public offset: (number|Long|string); + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsRequest): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Verifies a ReadRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsRequest; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @param message ReadRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ThrottleState. */ + interface IThrottleState { + + /** ThrottleState throttlePercent */ + throttlePercent?: (number|null); + } + + /** Represents a ThrottleState. */ + class ThrottleState implements IThrottleState { + + /** + * Constructs a new ThrottleState. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IThrottleState); + + /** ThrottleState throttlePercent. */ + public throttlePercent: number; + + /** + * Creates a new ThrottleState instance using the specified properties. + * @param [properties] Properties to set + * @returns ThrottleState instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IThrottleState): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @param message ThrottleState message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. 
+ * @param message ThrottleState message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IThrottleState, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ThrottleState message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Decodes a ThrottleState message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Verifies a ThrottleState message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ThrottleState + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ThrottleState; + + /** + * Creates a plain object from a ThrottleState message. Also converts values to other types if specified. 
+ * @param message ThrottleState + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ThrottleState, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ThrottleState to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ThrottleState + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamStats. */ + interface IStreamStats { + + /** StreamStats progress */ + progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); + } + + /** Represents a StreamStats. */ + class StreamStats implements IStreamStats { + + /** + * Constructs a new StreamStats. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IStreamStats); + + /** StreamStats progress. */ + public progress?: (google.cloud.bigquery.storage.v1.StreamStats.IProgress|null); + + /** + * Creates a new StreamStats instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamStats instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IStreamStats): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @param message StreamStats message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamStats message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @param message StreamStats message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStreamStats, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamStats message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Decodes a StreamStats message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Verifies a StreamStats message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamStats + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats; + + /** + * Creates a plain object from a StreamStats message. Also converts values to other types if specified. 
+ * @param message StreamStats + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamStats to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamStats + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace StreamStats { + + /** Properties of a Progress. */ + interface IProgress { + + /** Progress atResponseStart */ + atResponseStart?: (number|null); + + /** Progress atResponseEnd */ + atResponseEnd?: (number|null); + } + + /** Represents a Progress. */ + class Progress implements IProgress { + + /** + * Constructs a new Progress. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress); + + /** Progress atResponseStart. */ + public atResponseStart: number; + + /** Progress atResponseEnd. */ + public atResponseEnd: number; + + /** + * Creates a new Progress instance using the specified properties. + * @param [properties] Properties to set + * @returns Progress instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.StreamStats.IProgress): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. 
+ * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.StreamStats.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Verifies a Progress message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns Progress + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StreamStats.Progress; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @param message Progress + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.StreamStats.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Progress to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Progress + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of a ReadRowsResponse. */ + interface IReadRowsResponse { + + /** ReadRowsResponse avroRows */ + avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch */ + arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount */ + rowCount?: (number|Long|string|null); + + /** ReadRowsResponse stats */ + stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); + + /** ReadRowsResponse throttleState */ + throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + + /** ReadRowsResponse avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + } + + /** Represents a ReadRowsResponse. */ + class ReadRowsResponse implements IReadRowsResponse { + + /** + * Constructs a new ReadRowsResponse. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse); + + /** ReadRowsResponse avroRows. */ + public avroRows?: (google.cloud.bigquery.storage.v1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch. */ + public arrowRecordBatch?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount. */ + public rowCount: (number|Long|string); + + /** ReadRowsResponse stats. */ + public stats?: (google.cloud.bigquery.storage.v1.IStreamStats|null); + + /** ReadRowsResponse throttleState. */ + public throttleState?: (google.cloud.bigquery.storage.v1.IThrottleState|null); + + /** ReadRowsResponse avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadRowsResponse rows. */ + public rows?: ("avroRows"|"arrowRecordBatch"); + + /** ReadRowsResponse schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadRowsResponse): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
+ * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Verifies a ReadRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadRowsResponse; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. 
+ * @param message ReadRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SplitReadStreamRequest. */ + interface ISplitReadStreamRequest { + + /** SplitReadStreamRequest name */ + name?: (string|null); + + /** SplitReadStreamRequest fraction */ + fraction?: (number|null); + } + + /** Represents a SplitReadStreamRequest. */ + class SplitReadStreamRequest implements ISplitReadStreamRequest { + + /** + * Constructs a new SplitReadStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest); + + /** SplitReadStreamRequest name. */ + public name: string; + + /** SplitReadStreamRequest fraction. */ + public fraction: number; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
+ * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Verifies a SplitReadStreamRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamRequest; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * @param message SplitReadStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SplitReadStreamResponse. */ + interface ISplitReadStreamResponse { + + /** SplitReadStreamResponse primaryStream */ + primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** SplitReadStreamResponse remainderStream */ + remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + } + + /** Represents a SplitReadStreamResponse. */ + class SplitReadStreamResponse implements ISplitReadStreamResponse { + + /** + * Constructs a new SplitReadStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse); + + /** SplitReadStreamResponse primaryStream. 
*/ + public primaryStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** SplitReadStreamResponse remainderStream. */ + public remainderStream?: (google.cloud.bigquery.storage.v1.IReadStream|null); + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Verifies a SplitReadStreamResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.SplitReadStreamResponse; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * @param message SplitReadStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamResponse to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a CreateWriteStreamRequest. */ + interface ICreateWriteStreamRequest { + + /** CreateWriteStreamRequest parent */ + parent?: (string|null); + + /** CreateWriteStreamRequest writeStream */ + writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); + } + + /** Represents a CreateWriteStreamRequest. */ + class CreateWriteStreamRequest implements ICreateWriteStreamRequest { + + /** + * Constructs a new CreateWriteStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest); + + /** CreateWriteStreamRequest parent. */ + public parent: string; + + /** CreateWriteStreamRequest writeStream. */ + public writeStream?: (google.cloud.bigquery.storage.v1.IWriteStream|null); + + /** + * Creates a new CreateWriteStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateWriteStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; + + /** + * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. 
+ * @param message CreateWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @param message CreateWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; + + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; + + /** + * Verifies a CreateWriteStreamRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateWriteStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.CreateWriteStreamRequest; + + /** + * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified. + * @param message CreateWriteStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateWriteStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an AppendRowsRequest. */ + interface IAppendRowsRequest { + + /** AppendRowsRequest writeStream */ + writeStream?: (string|null); + + /** AppendRowsRequest offset */ + offset?: (google.protobuf.IInt64Value|null); + + /** AppendRowsRequest protoRows */ + protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + + /** AppendRowsRequest traceId */ + traceId?: (string|null); + } + + /** Represents an AppendRowsRequest. */ + class AppendRowsRequest implements IAppendRowsRequest { + + /** + * Constructs a new AppendRowsRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest); + + /** AppendRowsRequest writeStream. */ + public writeStream: string; + + /** AppendRowsRequest offset. */ + public offset?: (google.protobuf.IInt64Value|null); + + /** AppendRowsRequest protoRows. */ + public protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + + /** AppendRowsRequest traceId. */ + public traceId: string; + + /** AppendRowsRequest rows. */ + public rows?: "protoRows"; + + /** + * Creates a new AppendRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns AppendRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsRequest): google.cloud.bigquery.storage.v1.AppendRowsRequest; + + /** + * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @param message AppendRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @param message AppendRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AppendRowsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AppendRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest; + + /** + * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AppendRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest; + + /** + * Verifies an AppendRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AppendRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest; + + /** + * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. + * @param message AppendRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AppendRowsRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace AppendRowsRequest { + + /** Properties of a ProtoData. */ + interface IProtoData { + + /** ProtoData writerSchema */ + writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); + + /** ProtoData rows */ + rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); + } + + /** Represents a ProtoData. */ + class ProtoData implements IProtoData { + + /** + * Constructs a new ProtoData. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData); + + /** ProtoData writerSchema. */ + public writerSchema?: (google.cloud.bigquery.storage.v1.IProtoSchema|null); + + /** ProtoData rows. */ + public rows?: (google.cloud.bigquery.storage.v1.IProtoRows|null); + + /** + * Creates a new ProtoData instance using the specified properties. + * @param [properties] Properties to set + * @returns ProtoData instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; + + /** + * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. + * @param message ProtoData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ProtoData message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. + * @param message ProtoData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ProtoData message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ProtoData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; + + /** + * Decodes a ProtoData message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ProtoData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; + + /** + * Verifies a ProtoData message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ProtoData + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData; + + /** + * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
+ * @param message ProtoData + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ProtoData to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ProtoData + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of an AppendRowsResponse. */ + interface IAppendRowsResponse { + + /** AppendRowsResponse appendResult */ + appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); + + /** AppendRowsResponse error */ + error?: (google.rpc.IStatus|null); + + /** AppendRowsResponse updatedSchema */ + updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** AppendRowsResponse rowErrors */ + rowErrors?: (google.cloud.bigquery.storage.v1.IRowError[]|null); + + /** AppendRowsResponse writeStream */ + writeStream?: (string|null); + } + + /** Represents an AppendRowsResponse. */ + class AppendRowsResponse implements IAppendRowsResponse { + + /** + * Constructs a new AppendRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse); + + /** AppendRowsResponse appendResult. */ + public appendResult?: (google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null); + + /** AppendRowsResponse error. */ + public error?: (google.rpc.IStatus|null); + + /** AppendRowsResponse updatedSchema. */ + public updatedSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** AppendRowsResponse rowErrors. */ + public rowErrors: google.cloud.bigquery.storage.v1.IRowError[]; + + /** AppendRowsResponse writeStream. 
*/ + public writeStream: string; + + /** AppendRowsResponse response. */ + public response?: ("appendResult"|"error"); + + /** + * Creates a new AppendRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns AppendRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IAppendRowsResponse): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. + * @param message AppendRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. + * @param message AppendRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IAppendRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Verifies an AppendRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AppendRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse; + + /** + * Creates a plain object from an AppendRowsResponse message. Also converts values to other types if specified. + * @param message AppendRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AppendRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace AppendRowsResponse { + + /** Properties of an AppendResult. */ + interface IAppendResult { + + /** AppendResult offset */ + offset?: (google.protobuf.IInt64Value|null); + } + + /** Represents an AppendResult. */ + class AppendResult implements IAppendResult { + + /** + * Constructs a new AppendResult. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult); + + /** AppendResult offset. */ + public offset?: (google.protobuf.IInt64Value|null); + + /** + * Creates a new AppendResult instance using the specified properties. + * @param [properties] Properties to set + * @returns AppendResult instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; + + /** + * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. + * @param message AppendResult message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. + * @param message AppendResult message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AppendResult message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AppendResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; + + /** + * Decodes an AppendResult message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns AppendResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; + + /** + * Verifies an AppendResult message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AppendResult + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult; + + /** + * Creates a plain object from an AppendResult message. Also converts values to other types if specified. + * @param message AppendResult + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AppendResult to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AppendResult + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of a GetWriteStreamRequest. */ + interface IGetWriteStreamRequest { + + /** GetWriteStreamRequest name */ + name?: (string|null); + + /** GetWriteStreamRequest view */ + view?: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView|null); + } + + /** Represents a GetWriteStreamRequest. */ + class GetWriteStreamRequest implements IGetWriteStreamRequest { + + /** + * Constructs a new GetWriteStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest); + + /** GetWriteStreamRequest name. */ + public name: string; + + /** GetWriteStreamRequest view. */ + public view: (google.cloud.bigquery.storage.v1.WriteStreamView|keyof typeof google.cloud.bigquery.storage.v1.WriteStreamView); + + /** + * Creates a new GetWriteStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetWriteStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; + + /** + * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. 
+ * @param message GetWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GetWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. + * @param message GetWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetWriteStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; + + /** + * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GetWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; + + /** + * Verifies a GetWriteStreamRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GetWriteStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.GetWriteStreamRequest; + + /** + * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. + * @param message GetWriteStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.GetWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GetWriteStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for GetWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCommitWriteStreamsRequest. */ + interface IBatchCommitWriteStreamsRequest { + + /** BatchCommitWriteStreamsRequest parent */ + parent?: (string|null); + + /** BatchCommitWriteStreamsRequest writeStreams */ + writeStreams?: (string[]|null); + } + + /** Represents a BatchCommitWriteStreamsRequest. */ + class BatchCommitWriteStreamsRequest implements IBatchCommitWriteStreamsRequest { + + /** + * Constructs a new BatchCommitWriteStreamsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest); + + /** BatchCommitWriteStreamsRequest parent. 
*/ + public parent: string; + + /** BatchCommitWriteStreamsRequest writeStreams. */ + public writeStreams: string[]; + + /** + * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCommitWriteStreamsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; + + /** + * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. + * @param message BatchCommitWriteStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. + * @param message BatchCommitWriteStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCommitWriteStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; + + /** + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCommitWriteStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; + + /** + * Verifies a BatchCommitWriteStreamsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCommitWriteStreamsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; + + /** + * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. 
+ * @param message BatchCommitWriteStreamsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCommitWriteStreamsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCommitWriteStreamsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCommitWriteStreamsResponse. */ + interface IBatchCommitWriteStreamsResponse { + + /** BatchCommitWriteStreamsResponse commitTime */ + commitTime?: (google.protobuf.ITimestamp|null); + + /** BatchCommitWriteStreamsResponse streamErrors */ + streamErrors?: (google.cloud.bigquery.storage.v1.IStorageError[]|null); + } + + /** Represents a BatchCommitWriteStreamsResponse. */ + class BatchCommitWriteStreamsResponse implements IBatchCommitWriteStreamsResponse { + + /** + * Constructs a new BatchCommitWriteStreamsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse); + + /** BatchCommitWriteStreamsResponse commitTime. */ + public commitTime?: (google.protobuf.ITimestamp|null); + + /** BatchCommitWriteStreamsResponse streamErrors. */ + public streamErrors: google.cloud.bigquery.storage.v1.IStorageError[]; + + /** + * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns BatchCommitWriteStreamsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; + + /** + * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. + * @param message BatchCommitWriteStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. + * @param message BatchCommitWriteStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCommitWriteStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; + + /** + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchCommitWriteStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; + + /** + * Verifies a BatchCommitWriteStreamsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCommitWriteStreamsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; + + /** + * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. + * @param message BatchCommitWriteStreamsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCommitWriteStreamsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCommitWriteStreamsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FinalizeWriteStreamRequest. 
*/ + interface IFinalizeWriteStreamRequest { + + /** FinalizeWriteStreamRequest name */ + name?: (string|null); + } + + /** Represents a FinalizeWriteStreamRequest. */ + class FinalizeWriteStreamRequest implements IFinalizeWriteStreamRequest { + + /** + * Constructs a new FinalizeWriteStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest); + + /** FinalizeWriteStreamRequest name. */ + public name: string; + + /** + * Creates a new FinalizeWriteStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns FinalizeWriteStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; + + /** + * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. + * @param message FinalizeWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. + * @param message FinalizeWriteStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FinalizeWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; + + /** + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FinalizeWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; + + /** + * Verifies a FinalizeWriteStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FinalizeWriteStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest; + + /** + * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. + * @param message FinalizeWriteStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FinalizeWriteStreamRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeWriteStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FinalizeWriteStreamResponse. */ + interface IFinalizeWriteStreamResponse { + + /** FinalizeWriteStreamResponse rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents a FinalizeWriteStreamResponse. */ + class FinalizeWriteStreamResponse implements IFinalizeWriteStreamResponse { + + /** + * Constructs a new FinalizeWriteStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse); + + /** FinalizeWriteStreamResponse rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new FinalizeWriteStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns FinalizeWriteStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; + + /** + * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. + * @param message FinalizeWriteStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
+ * @param message FinalizeWriteStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FinalizeWriteStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; + + /** + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FinalizeWriteStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; + + /** + * Verifies a FinalizeWriteStreamResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FinalizeWriteStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse; + + /** + * Creates a plain object from a FinalizeWriteStreamResponse message. 
Also converts values to other types if specified. + * @param message FinalizeWriteStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FinalizeWriteStreamResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeWriteStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FlushRowsRequest. */ + interface IFlushRowsRequest { + + /** FlushRowsRequest writeStream */ + writeStream?: (string|null); + + /** FlushRowsRequest offset */ + offset?: (google.protobuf.IInt64Value|null); + } + + /** Represents a FlushRowsRequest. */ + class FlushRowsRequest implements IFlushRowsRequest { + + /** + * Constructs a new FlushRowsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest); + + /** FlushRowsRequest writeStream. */ + public writeStream: string; + + /** FlushRowsRequest offset. */ + public offset?: (google.protobuf.IInt64Value|null); + + /** + * Creates a new FlushRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns FlushRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsRequest): google.cloud.bigquery.storage.v1.FlushRowsRequest; + + /** + * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. 
+ * @param message FlushRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. + * @param message FlushRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FlushRowsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FlushRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsRequest; + + /** + * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FlushRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsRequest; + + /** + * Verifies a FlushRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FlushRowsRequest message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns FlushRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsRequest; + + /** + * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. + * @param message FlushRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FlushRowsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FlushRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FlushRowsResponse. */ + interface IFlushRowsResponse { + + /** FlushRowsResponse offset */ + offset?: (number|Long|string|null); + } + + /** Represents a FlushRowsResponse. */ + class FlushRowsResponse implements IFlushRowsResponse { + + /** + * Constructs a new FlushRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse); + + /** FlushRowsResponse offset. */ + public offset: (number|Long|string); + + /** + * Creates a new FlushRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns FlushRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IFlushRowsResponse): google.cloud.bigquery.storage.v1.FlushRowsResponse; + + /** + * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
+ * @param message FlushRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. + * @param message FlushRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IFlushRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FlushRowsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.FlushRowsResponse; + + /** + * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.FlushRowsResponse; + + /** + * Verifies a FlushRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FlushRowsResponse message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns FlushRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.FlushRowsResponse; + + /** + * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. + * @param message FlushRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.FlushRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FlushRowsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FlushRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StorageError. */ + interface IStorageError { + + /** StorageError code */ + code?: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null); + + /** StorageError entity */ + entity?: (string|null); + + /** StorageError errorMessage */ + errorMessage?: (string|null); + } + + /** Represents a StorageError. */ + class StorageError implements IStorageError { + + /** + * Constructs a new StorageError. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IStorageError); + + /** StorageError code. */ + public code: (google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|keyof typeof google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode); + + /** StorageError entity. */ + public entity: string; + + /** StorageError errorMessage. 
*/ + public errorMessage: string; + + /** + * Creates a new StorageError instance using the specified properties. + * @param [properties] Properties to set + * @returns StorageError instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IStorageError): google.cloud.bigquery.storage.v1.StorageError; + + /** + * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. + * @param message StorageError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. + * @param message StorageError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IStorageError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StorageError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StorageError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.StorageError; + + /** + * Decodes a StorageError message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns StorageError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.StorageError; + + /** + * Verifies a StorageError message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StorageError message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StorageError + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.StorageError; + + /** + * Creates a plain object from a StorageError message. Also converts values to other types if specified. + * @param message StorageError + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.StorageError, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StorageError to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StorageError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace StorageError { + + /** StorageErrorCode enum. */ + enum StorageErrorCode { + STORAGE_ERROR_CODE_UNSPECIFIED = 0, + TABLE_NOT_FOUND = 1, + STREAM_ALREADY_COMMITTED = 2, + STREAM_NOT_FOUND = 3, + INVALID_STREAM_TYPE = 4, + INVALID_STREAM_STATE = 5, + STREAM_FINALIZED = 6, + SCHEMA_MISMATCH_EXTRA_FIELDS = 7, + OFFSET_ALREADY_EXISTS = 8, + OFFSET_OUT_OF_RANGE = 9 + } + } + + /** Properties of a RowError. 
*/ + interface IRowError { + + /** RowError index */ + index?: (number|Long|string|null); + + /** RowError code */ + code?: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null); + + /** RowError message */ + message?: (string|null); + } + + /** Represents a RowError. */ + class RowError implements IRowError { + + /** + * Constructs a new RowError. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IRowError); + + /** RowError index. */ + public index: (number|Long|string); + + /** RowError code. */ + public code: (google.cloud.bigquery.storage.v1.RowError.RowErrorCode|keyof typeof google.cloud.bigquery.storage.v1.RowError.RowErrorCode); + + /** RowError message. */ + public message: string; + + /** + * Creates a new RowError instance using the specified properties. + * @param [properties] Properties to set + * @returns RowError instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IRowError): google.cloud.bigquery.storage.v1.RowError; + + /** + * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. + * @param message RowError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. 
+ * @param message RowError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IRowError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a RowError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.RowError; + + /** + * Decodes a RowError message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.RowError; + + /** + * Verifies a RowError message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a RowError message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns RowError + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.RowError; + + /** + * Creates a plain object from a RowError message. Also converts values to other types if specified. 
+ * @param message RowError + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.RowError, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this RowError to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for RowError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace RowError { + + /** RowErrorCode enum. */ + enum RowErrorCode { + ROW_ERROR_CODE_UNSPECIFIED = 0, + FIELDS_ERROR = 1 + } + } + + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 2 + } + + /** Properties of a ReadSession. */ + interface IReadSession { + + /** ReadSession name */ + name?: (string|null); + + /** ReadSession expireTime */ + expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession dataFormat */ + dataFormat?: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat|null); + + /** ReadSession avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadSession arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadSession table */ + table?: (string|null); + + /** ReadSession tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); + + /** ReadSession readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1.IReadStream[]|null); + + /** ReadSession estimatedTotalBytesScanned */ + estimatedTotalBytesScanned?: (number|Long|string|null); + + /** ReadSession traceId */ + traceId?: (string|null); + } + + /** Represents a 
ReadSession. */ + class ReadSession implements IReadSession { + + /** + * Constructs a new ReadSession. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadSession); + + /** ReadSession name. */ + public name: string; + + /** ReadSession expireTime. */ + public expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession dataFormat. */ + public dataFormat: (google.cloud.bigquery.storage.v1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1.DataFormat); + + /** ReadSession avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1.IAvroSchema|null); + + /** ReadSession arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadSession table. */ + public table: string; + + /** ReadSession tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null); + + /** ReadSession readOptions. */ + public readOptions?: (google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null); + + /** ReadSession streams. */ + public streams: google.cloud.bigquery.storage.v1.IReadStream[]; + + /** ReadSession estimatedTotalBytesScanned. */ + public estimatedTotalBytesScanned: (number|Long|string); + + /** ReadSession traceId. */ + public traceId: string; + + /** ReadSession schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + + /** + * Creates a new ReadSession instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadSession instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadSession): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. 
+ * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadSession message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Verifies a ReadSession message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ReadSession + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @param message ReadSession + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadSession to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadSession + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace ReadSession { + + /** Properties of a TableModifiers. */ + interface ITableModifiers { + + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { + + /** + * Constructs a new TableModifiers. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers); + + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new TableModifiers instance using the specified properties. + * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. 
+ * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableModifiers message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableModifiers; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableModifiers to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableModifiers + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a TableReadOptions. */ + interface ITableReadOptions { + + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); + + /** TableReadOptions arrowSerializationOptions */ + arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + + /** TableReadOptions avroSerializationOptions */ + avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); + } + + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { + + /** + * Constructs a new TableReadOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions); + + /** TableReadOptions selectedFields. */ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. 
*/ + public rowRestriction: string; + + /** TableReadOptions arrowSerializationOptions. */ + public arrowSerializationOptions?: (google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null); + + /** TableReadOptions avroSerializationOptions. */ + public avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); + + /** TableReadOptions outputFormatSerializationOptions. */ + public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReadOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Verifies a TableReadOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReadOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * @param message TableReadOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReadOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReadOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of a ReadStream. */ + interface IReadStream { + + /** ReadStream name */ + name?: (string|null); + } + + /** Represents a ReadStream. */ + class ReadStream implements IReadStream { + + /** + * Constructs a new ReadStream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IReadStream); + + /** ReadStream name. */ + public name: string; + + /** + * Creates a new ReadStream instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadStream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IReadStream): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadStream message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Verifies a ReadStream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadStream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.ReadStream; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @param message ReadStream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadStream to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** WriteStreamView enum. */ + enum WriteStreamView { + WRITE_STREAM_VIEW_UNSPECIFIED = 0, + BASIC = 1, + FULL = 2 + } + + /** Properties of a WriteStream. */ + interface IWriteStream { + + /** WriteStream name */ + name?: (string|null); + + /** WriteStream type */ + type?: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type|null); + + /** WriteStream createTime */ + createTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream commitTime */ + commitTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream tableSchema */ + tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** WriteStream writeMode */ + writeMode?: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null); + + /** WriteStream location */ + location?: (string|null); + } + + /** Represents a WriteStream. */ + class WriteStream implements IWriteStream { + + /** + * Constructs a new WriteStream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.IWriteStream); + + /** WriteStream name. */ + public name: string; + + /** WriteStream type. */ + public type: (google.cloud.bigquery.storage.v1.WriteStream.Type|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.Type); + + /** WriteStream createTime. */ + public createTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream commitTime. */ + public commitTime?: (google.protobuf.ITimestamp|null); + + /** WriteStream tableSchema. 
*/ + public tableSchema?: (google.cloud.bigquery.storage.v1.ITableSchema|null); + + /** WriteStream writeMode. */ + public writeMode: (google.cloud.bigquery.storage.v1.WriteStream.WriteMode|keyof typeof google.cloud.bigquery.storage.v1.WriteStream.WriteMode); + + /** WriteStream location. */ + public location: string; + + /** + * Creates a new WriteStream instance using the specified properties. + * @param [properties] Properties to set + * @returns WriteStream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.IWriteStream): google.cloud.bigquery.storage.v1.WriteStream; + + /** + * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. + * @param message WriteStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. + * @param message WriteStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.IWriteStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a WriteStream message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns WriteStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.WriteStream; + + /** + * Decodes a WriteStream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns WriteStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.WriteStream; + + /** + * Verifies a WriteStream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns WriteStream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.WriteStream; + + /** + * Creates a plain object from a WriteStream message. Also converts values to other types if specified. + * @param message WriteStream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.WriteStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this WriteStream to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for WriteStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace WriteStream { + + /** Type enum. */ + enum Type { + TYPE_UNSPECIFIED = 0, + COMMITTED = 1, + PENDING = 2, + BUFFERED = 3 + } + + /** WriteMode enum. */ + enum WriteMode { + WRITE_MODE_UNSPECIFIED = 0, + INSERT = 1 + } + } + + /** Properties of a TableSchema. */ + interface ITableSchema { + + /** TableSchema fields */ + fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); + } + + /** Represents a TableSchema. */ + class TableSchema implements ITableSchema { + + /** + * Constructs a new TableSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ITableSchema); + + /** TableSchema fields. */ + public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; + + /** + * Creates a new TableSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns TableSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ITableSchema): google.cloud.bigquery.storage.v1.TableSchema; + + /** + * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. + * @param message TableSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
+ * @param message TableSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableSchema; + + /** + * Decodes a TableSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableSchema; + + /** + * Verifies a TableSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableSchema; + + /** + * Creates a plain object from a TableSchema message. Also converts values to other types if specified. 
+ * @param message TableSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.TableSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a TableFieldSchema. */ + interface ITableFieldSchema { + + /** TableFieldSchema name */ + name?: (string|null); + + /** TableFieldSchema type */ + type?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null); + + /** TableFieldSchema mode */ + mode?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null); + + /** TableFieldSchema fields */ + fields?: (google.cloud.bigquery.storage.v1.ITableFieldSchema[]|null); + + /** TableFieldSchema description */ + description?: (string|null); + + /** TableFieldSchema maxLength */ + maxLength?: (number|Long|string|null); + + /** TableFieldSchema precision */ + precision?: (number|Long|string|null); + + /** TableFieldSchema scale */ + scale?: (number|Long|string|null); + } + + /** Represents a TableFieldSchema. */ + class TableFieldSchema implements ITableFieldSchema { + + /** + * Constructs a new TableFieldSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema); + + /** TableFieldSchema name. */ + public name: string; + + /** TableFieldSchema type. 
*/ + public type: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type); + + /** TableFieldSchema mode. */ + public mode: (google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Mode); + + /** TableFieldSchema fields. */ + public fields: google.cloud.bigquery.storage.v1.ITableFieldSchema[]; + + /** TableFieldSchema description. */ + public description: string; + + /** TableFieldSchema maxLength. */ + public maxLength: (number|Long|string); + + /** TableFieldSchema precision. */ + public precision: (number|Long|string); + + /** TableFieldSchema scale. */ + public scale: (number|Long|string); + + /** + * Creates a new TableFieldSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns TableFieldSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.ITableFieldSchema): google.cloud.bigquery.storage.v1.TableFieldSchema; + + /** + * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. + * @param message TableFieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. 
+ * @param message TableFieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.ITableFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableFieldSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableFieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableFieldSchema; + + /** + * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableFieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableFieldSchema; + + /** + * Verifies a TableFieldSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableFieldSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableFieldSchema; + + /** + * Creates a plain object from a TableFieldSchema message. Also converts values to other types if specified. 
+ * @param message TableFieldSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.TableFieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableFieldSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableFieldSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace TableFieldSchema { + + /** Type enum. */ + enum Type { + TYPE_UNSPECIFIED = 0, + STRING = 1, + INT64 = 2, + DOUBLE = 3, + STRUCT = 4, + BYTES = 5, + BOOL = 6, + TIMESTAMP = 7, + DATE = 8, + TIME = 9, + DATETIME = 10, + GEOGRAPHY = 11, + NUMERIC = 12, + BIGNUMERIC = 13, + INTERVAL = 14, + JSON = 15 + } + + /** Mode enum. */ + enum Mode { + MODE_UNSPECIFIED = 0, + NULLABLE = 1, + REQUIRED = 2, + REPEATED = 3 + } + } + } + + /** Namespace v1beta1. */ + namespace v1beta1 { + + /** Properties of an ArrowSchema. */ + interface IArrowSchema { + + /** ArrowSchema serializedSchema */ + serializedSchema?: (Uint8Array|string|null); + } + + /** Represents an ArrowSchema. */ + class ArrowSchema implements IArrowSchema { + + /** + * Constructs a new ArrowSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); + + /** ArrowSchema serializedSchema. */ + public serializedSchema: (Uint8Array|string); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Encodes the specified ArrowSchema message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @param message ArrowSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Verifies an ArrowSchema message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowSchema; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @param message ArrowSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an ArrowRecordBatch. */ + interface IArrowRecordBatch { + + /** ArrowRecordBatch serializedRecordBatch */ + serializedRecordBatch?: (Uint8Array|string|null); + + /** ArrowRecordBatch rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an ArrowRecordBatch. */ + class ArrowRecordBatch implements IArrowRecordBatch { + + /** + * Constructs a new ArrowRecordBatch. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); + + /** ArrowRecordBatch serializedRecordBatch. */ + public serializedRecordBatch: (Uint8Array|string); + + /** ArrowRecordBatch rowCount. 
*/ + public rowCount: (number|Long|string); + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowRecordBatch instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @param message ArrowRecordBatch message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Verifies an ArrowRecordBatch message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowRecordBatch + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. + * @param message ArrowRecordBatch + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowRecordBatch to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowRecordBatch + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an AvroSchema. */ + interface IAvroSchema { + + /** AvroSchema schema */ + schema?: (string|null); + } + + /** Represents an AvroSchema. */ + class AvroSchema implements IAvroSchema { + + /** + * Constructs a new AvroSchema. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema); + + /** AvroSchema schema. */ + public schema: string; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroSchema): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @param message AvroSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Verifies an AvroSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroSchema; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @param message AvroSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an AvroRows. */ + interface IAvroRows { + + /** AvroRows serializedBinaryRows */ + serializedBinaryRows?: (Uint8Array|string|null); + + /** AvroRows rowCount */ + rowCount?: (number|Long|string|null); + } + + /** Represents an AvroRows. */ + class AvroRows implements IAvroRows { + + /** + * Constructs a new AvroRows. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); + + /** AvroRows serializedBinaryRows. */ + public serializedBinaryRows: (Uint8Array|string); + + /** AvroRows rowCount. */ + public rowCount: (number|Long|string); + + /** + * Creates a new AvroRows instance using the specified properties. + * @param [properties] Properties to set + * @returns AvroRows instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @param message AvroRows message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IAvroRows, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AvroRows message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Verifies an AvroRows message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns AvroRows + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.AvroRows; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. + * @param message AvroRows + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.AvroRows, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this AvroRows to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for AvroRows + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a TableReadOptions. */ + interface ITableReadOptions { + + /** TableReadOptions selectedFields */ + selectedFields?: (string[]|null); + + /** TableReadOptions rowRestriction */ + rowRestriction?: (string|null); + } + + /** Represents a TableReadOptions. */ + class TableReadOptions implements ITableReadOptions { + + /** + * Constructs a new TableReadOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions); + + /** TableReadOptions selectedFields. */ + public selectedFields: string[]; + + /** TableReadOptions rowRestriction. */ + public rowRestriction: string; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReadOptions instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReadOptions): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @param message TableReadOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReadOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Verifies a TableReadOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReadOptions + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReadOptions; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. + * @param message TableReadOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReadOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReadOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReadOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Represents a BigQueryStorage */ + class BigQueryStorage extends $protobuf.rpc.Service { + + /** + * Constructs a new BigQueryStorage service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new BigQueryStorage service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): BigQueryStorage; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadSession + */ + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback): void; + + /** + * Calls CreateReadSession. + * @param request CreateReadSessionRequest message or plain object + * @returns Promise + */ + public createReadSession(request: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): Promise; + + /** + * Calls ReadRows. 
+ * @param request ReadRowsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ReadRowsResponse + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback): void; + + /** + * Calls ReadRows. + * @param request ReadRowsRequest message or plain object + * @returns Promise + */ + public readRows(request: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): Promise; + + /** + * Calls BatchCreateReadSessionStreams. + * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + */ + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback): void; + + /** + * Calls BatchCreateReadSessionStreams. + * @param request BatchCreateReadSessionStreamsRequest message or plain object + * @returns Promise + */ + public batchCreateReadSessionStreams(request: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): Promise; + + /** + * Calls FinalizeStream. + * @param request FinalizeStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback): void; + + /** + * Calls FinalizeStream. + * @param request FinalizeStreamRequest message or plain object + * @returns Promise + */ + public finalizeStream(request: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): Promise; + + /** + * Calls SplitReadStream. 
+ * @param request SplitReadStreamRequest message or plain object + * @param callback Node-style callback called with the error, if any, and SplitReadStreamResponse + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, callback: google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback): void; + + /** + * Calls SplitReadStream. + * @param request SplitReadStreamRequest message or plain object + * @returns Promise + */ + public splitReadStream(request: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): Promise; + } + + namespace BigQueryStorage { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. + * @param error Error, if any + * @param [response] ReadSession + */ + type CreateReadSessionCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadSession) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. + * @param error Error, if any + * @param [response] ReadRowsResponse + */ + type ReadRowsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. + * @param error Error, if any + * @param [response] BatchCreateReadSessionStreamsResponse + */ + type BatchCreateReadSessionStreamsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. 
+ * @param error Error, if any + * @param [response] Empty + */ + type FinalizeStreamCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. + * @param error Error, if any + * @param [response] SplitReadStreamResponse + */ + type SplitReadStreamCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) => void; + } + + /** Properties of a Stream. */ + interface IStream { + + /** Stream name */ + name?: (string|null); + } + + /** Represents a Stream. */ + class Stream implements IStream { + + /** + * Constructs a new Stream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStream); + + /** Stream name. */ + public name: string; + + /** + * Creates a new Stream instance using the specified properties. + * @param [properties] Properties to set + * @returns Stream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStream): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @param message Stream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. 
+ * @param message Stream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Stream message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Decodes a Stream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Verifies a Stream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Stream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Stream; + + /** + * Creates a plain object from a Stream message. Also converts values to other types if specified. 
+ * @param message Stream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Stream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Stream to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Stream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamPosition. */ + interface IStreamPosition { + + /** StreamPosition stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** StreamPosition offset */ + offset?: (number|Long|string|null); + } + + /** Represents a StreamPosition. */ + class StreamPosition implements IStreamPosition { + + /** + * Constructs a new StreamPosition. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition); + + /** StreamPosition stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** StreamPosition offset. */ + public offset: (number|Long|string); + + /** + * Creates a new StreamPosition instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamPosition instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamPosition): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
+ * @param message StreamPosition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @param message StreamPosition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamPosition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamPosition message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Verifies a StreamPosition message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamPosition message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamPosition + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamPosition; + + /** + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. + * @param message StreamPosition + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamPosition, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamPosition to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamPosition + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReadSession. */ + interface IReadSession { + + /** ReadSession name */ + name?: (string|null); + + /** ReadSession expireTime */ + expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + + /** ReadSession tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + } + + /** Represents a ReadSession. 
*/ + class ReadSession implements IReadSession { + + /** + * Constructs a new ReadSession. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession); + + /** ReadSession name. */ + public name: string; + + /** ReadSession expireTime. */ + public expireTime?: (google.protobuf.ITimestamp|null); + + /** ReadSession avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadSession arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + + /** ReadSession streams. */ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** ReadSession tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** ReadSession tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** ReadSession shardingStrategy. */ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** ReadSession schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + + /** + * Creates a new ReadSession instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadSession instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadSession): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. 
+ * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @param message ReadSession message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadSession, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadSession message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Verifies a ReadSession message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ReadSession + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadSession; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @param message ReadSession + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadSession, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadSession to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadSession + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a CreateReadSessionRequest. */ + interface ICreateReadSessionRequest { + + /** CreateReadSessionRequest tableReference */ + tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** CreateReadSessionRequest parent */ + parent?: (string|null); + + /** CreateReadSessionRequest tableModifiers */ + tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** CreateReadSessionRequest requestedStreams */ + requestedStreams?: (number|null); + + /** CreateReadSessionRequest readOptions */ + readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + + /** CreateReadSessionRequest format */ + format?: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat|null); + + /** CreateReadSessionRequest shardingStrategy */ + shardingStrategy?: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null); + } + + /** Represents a CreateReadSessionRequest. 
*/ + class CreateReadSessionRequest implements ICreateReadSessionRequest { + + /** + * Constructs a new CreateReadSessionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest); + + /** CreateReadSessionRequest tableReference. */ + public tableReference?: (google.cloud.bigquery.storage.v1beta1.ITableReference|null); + + /** CreateReadSessionRequest parent. */ + public parent: string; + + /** CreateReadSessionRequest tableModifiers. */ + public tableModifiers?: (google.cloud.bigquery.storage.v1beta1.ITableModifiers|null); + + /** CreateReadSessionRequest requestedStreams. */ + public requestedStreams: number; + + /** CreateReadSessionRequest readOptions. */ + public readOptions?: (google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null); + + /** CreateReadSessionRequest format. */ + public format: (google.cloud.bigquery.storage.v1beta1.DataFormat|keyof typeof google.cloud.bigquery.storage.v1beta1.DataFormat); + + /** CreateReadSessionRequest shardingStrategy. */ + public shardingStrategy: (google.cloud.bigquery.storage.v1beta1.ShardingStrategy|keyof typeof google.cloud.bigquery.storage.v1beta1.ShardingStrategy); + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateReadSessionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
+ * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @param message CreateReadSessionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Verifies a CreateReadSessionRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateReadSessionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @param message CreateReadSessionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateReadSessionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 3 + } + + /** ShardingStrategy enum. */ + enum ShardingStrategy { + SHARDING_STRATEGY_UNSPECIFIED = 0, + LIQUID = 1, + BALANCED = 2 + } + + /** Properties of a ReadRowsRequest. */ + interface IReadRowsRequest { + + /** ReadRowsRequest readPosition */ + readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + } + + /** Represents a ReadRowsRequest. */ + class ReadRowsRequest implements IReadRowsRequest { + + /** + * Constructs a new ReadRowsRequest. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest); + + /** ReadRowsRequest readPosition. */ + public readPosition?: (google.cloud.bigquery.storage.v1beta1.IStreamPosition|null); + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @param message ReadRowsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Verifies a ReadRowsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsRequest; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @param message ReadRowsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamStatus. */ + interface IStreamStatus { + + /** StreamStatus estimatedRowCount */ + estimatedRowCount?: (number|Long|string|null); + + /** StreamStatus fractionConsumed */ + fractionConsumed?: (number|null); + + /** StreamStatus progress */ + progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable */ + isSplittable?: (boolean|null); + } + + /** Represents a StreamStatus. */ + class StreamStatus implements IStreamStatus { + + /** + * Constructs a new StreamStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus); + + /** StreamStatus estimatedRowCount. */ + public estimatedRowCount: (number|Long|string); + + /** StreamStatus fractionConsumed. */ + public fractionConsumed: number; + + /** StreamStatus progress. */ + public progress?: (google.cloud.bigquery.storage.v1beta1.IProgress|null); + + /** StreamStatus isSplittable. */ + public isSplittable: boolean; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IStreamStatus): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @param message StreamStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IStreamStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Verifies a StreamStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns StreamStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.StreamStatus; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. + * @param message StreamStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.StreamStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamStatus + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Progress. */ + interface IProgress { + + /** Progress atResponseStart */ + atResponseStart?: (number|null); + + /** Progress atResponseEnd */ + atResponseEnd?: (number|null); + } + + /** Represents a Progress. */ + class Progress implements IProgress { + + /** + * Constructs a new Progress. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IProgress); + + /** Progress atResponseStart. */ + public atResponseStart: number; + + /** Progress atResponseEnd. */ + public atResponseEnd: number; + + /** + * Creates a new Progress instance using the specified properties. + * @param [properties] Properties to set + * @returns Progress instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IProgress): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
+ * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @param message Progress message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IProgress, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Verifies a Progress message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns Progress + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.Progress; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @param message Progress + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.Progress, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Progress to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Progress + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ThrottleStatus. */ + interface IThrottleStatus { + + /** ThrottleStatus throttlePercent */ + throttlePercent?: (number|null); + } + + /** Represents a ThrottleStatus. */ + class ThrottleStatus implements IThrottleStatus { + + /** + * Constructs a new ThrottleStatus. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus); + + /** ThrottleStatus throttlePercent. */ + public throttlePercent: number; + + /** + * Creates a new ThrottleStatus instance using the specified properties. + * @param [properties] Properties to set + * @returns ThrottleStatus instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IThrottleStatus): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. 
+ * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @param message ThrottleStatus message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IThrottleStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Verifies a ThrottleStatus message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ThrottleStatus message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns ThrottleStatus + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ThrottleStatus; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. + * @param message ThrottleStatus + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ThrottleStatus, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ThrottleStatus to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ThrottleStatus + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReadRowsResponse. */ + interface IReadRowsResponse { + + /** ReadRowsResponse avroRows */ + avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch */ + arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount */ + rowCount?: (number|Long|string|null); + + /** ReadRowsResponse status */ + status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus */ + throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + } + + /** Represents a ReadRowsResponse. */ + class ReadRowsResponse implements IReadRowsResponse { + + /** + * Constructs a new ReadRowsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse); + + /** ReadRowsResponse avroRows. 
*/ + public avroRows?: (google.cloud.bigquery.storage.v1beta1.IAvroRows|null); + + /** ReadRowsResponse arrowRecordBatch. */ + public arrowRecordBatch?: (google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null); + + /** ReadRowsResponse rowCount. */ + public rowCount: (number|Long|string); + + /** ReadRowsResponse status. */ + public status?: (google.cloud.bigquery.storage.v1beta1.IStreamStatus|null); + + /** ReadRowsResponse throttleStatus. */ + public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + + /** ReadRowsResponse rows. */ + public rows?: ("avroRows"|"arrowRecordBatch"); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadRowsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. + * @param message ReadRowsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IReadRowsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Verifies a ReadRowsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadRowsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.ReadRowsResponse; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @param message ReadRowsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadRowsResponse to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadRowsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateReadSessionStreamsRequest. */ + interface IBatchCreateReadSessionStreamsRequest { + + /** BatchCreateReadSessionStreamsRequest session */ + session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams */ + requestedStreams?: (number|null); + } + + /** Represents a BatchCreateReadSessionStreamsRequest. */ + class BatchCreateReadSessionStreamsRequest implements IBatchCreateReadSessionStreamsRequest { + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest); + + /** BatchCreateReadSessionStreamsRequest session. */ + public session?: (google.cloud.bigquery.storage.v1beta1.IReadSession|null); + + /** BatchCreateReadSessionStreamsRequest requestedStreams. */ + public requestedStreams: number; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
+ * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. + * @param message BatchCreateReadSessionStreamsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateReadSessionStreamsResponse. */ + interface IBatchCreateReadSessionStreamsResponse { + + /** BatchCreateReadSessionStreamsResponse streams */ + streams?: (google.cloud.bigquery.storage.v1beta1.IStream[]|null); + } + + /** Represents a BatchCreateReadSessionStreamsResponse. */ + class BatchCreateReadSessionStreamsResponse implements IBatchCreateReadSessionStreamsResponse { + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse); + + /** BatchCreateReadSessionStreamsResponse streams. */ + public streams: google.cloud.bigquery.storage.v1beta1.IStream[]; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateReadSessionStreamsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @param message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateReadSessionStreamsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
+ * @param message BatchCreateReadSessionStreamsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FinalizeStreamRequest. */ + interface IFinalizeStreamRequest { + + /** FinalizeStreamRequest stream */ + stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a FinalizeStreamRequest. */ + class FinalizeStreamRequest implements IFinalizeStreamRequest { + + /** + * Constructs a new FinalizeStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest); + + /** FinalizeStreamRequest stream. */ + public stream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns FinalizeStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
+ * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @param message FinalizeStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Verifies a FinalizeStreamRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FinalizeStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. + * @param message FinalizeStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FinalizeStreamRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FinalizeStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SplitReadStreamRequest. */ + interface ISplitReadStreamRequest { + + /** SplitReadStreamRequest originalStream */ + originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction */ + fraction?: (number|null); + } + + /** Represents a SplitReadStreamRequest. */ + class SplitReadStreamRequest implements ISplitReadStreamRequest { + + /** + * Constructs a new SplitReadStreamRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest); + + /** SplitReadStreamRequest originalStream. 
*/ + public originalStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamRequest fraction. */ + public fraction: number; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @param message SplitReadStreamRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Verifies a SplitReadStreamRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * @param message SplitReadStreamRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamRequest to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SplitReadStreamResponse. */ + interface ISplitReadStreamResponse { + + /** SplitReadStreamResponse primaryStream */ + primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream */ + remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + } + + /** Represents a SplitReadStreamResponse. */ + class SplitReadStreamResponse implements ISplitReadStreamResponse { + + /** + * Constructs a new SplitReadStreamResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse); + + /** SplitReadStreamResponse primaryStream. */ + public primaryStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** SplitReadStreamResponse remainderStream. */ + public remainderStream?: (google.cloud.bigquery.storage.v1beta1.IStream|null); + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns SplitReadStreamResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @param message SplitReadStreamResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Verifies a SplitReadStreamResponse message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SplitReadStreamResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. + * @param message SplitReadStreamResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SplitReadStreamResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a TableReference. */ + interface ITableReference { + + /** TableReference projectId */ + projectId?: (string|null); + + /** TableReference datasetId */ + datasetId?: (string|null); + + /** TableReference tableId */ + tableId?: (string|null); + } + + /** Represents a TableReference. */ + class TableReference implements ITableReference { + + /** + * Constructs a new TableReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference); + + /** TableReference projectId. */ + public projectId: string; + + /** TableReference datasetId. */ + public datasetId: string; + + /** TableReference tableId. 
*/ + public tableId: string; + + /** + * Creates a new TableReference instance using the specified properties. + * @param [properties] Properties to set + * @returns TableReference instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableReference): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @param message TableReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableReference message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Verifies a TableReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableReference + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableReference; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. + * @param message TableReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableReference to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableReference + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a TableModifiers. */ + interface ITableModifiers { + + /** TableModifiers snapshotTime */ + snapshotTime?: (google.protobuf.ITimestamp|null); + } + + /** Represents a TableModifiers. */ + class TableModifiers implements ITableModifiers { + + /** + * Constructs a new TableModifiers. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers); + + /** TableModifiers snapshotTime. */ + public snapshotTime?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new TableModifiers instance using the specified properties. + * @param [properties] Properties to set + * @returns TableModifiers instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta1.ITableModifiers): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @param message TableModifiers message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta1.ITableModifiers, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Verifies a TableModifiers message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns TableModifiers + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta1.TableModifiers; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @param message TableModifiers + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta1.TableModifiers, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this TableModifiers to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for TableModifiers + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + } + } + } + + /** Namespace protobuf. */ + namespace protobuf { + + /** Properties of a FileDescriptorSet. */ + interface IFileDescriptorSet { + + /** FileDescriptorSet file */ + file?: (google.protobuf.IFileDescriptorProto[]|null); + } + + /** Represents a FileDescriptorSet. */ + class FileDescriptorSet implements IFileDescriptorSet { + + /** + * Constructs a new FileDescriptorSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorSet); + + /** FileDescriptorSet file. */ + public file: google.protobuf.IFileDescriptorProto[]; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorSet instance + */ + public static create(properties?: google.protobuf.IFileDescriptorSet): google.protobuf.FileDescriptorSet; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. 
+ * @param message FileDescriptorSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorSet; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorSet; + + /** + * Verifies a FileDescriptorSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorSet; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. 
+ * @param message FileDescriptorSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorSet to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileDescriptorSet + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FileDescriptorProto. */ + interface IFileDescriptorProto { + + /** FileDescriptorProto name */ + name?: (string|null); + + /** FileDescriptorProto package */ + "package"?: (string|null); + + /** FileDescriptorProto dependency */ + dependency?: (string[]|null); + + /** FileDescriptorProto publicDependency */ + publicDependency?: (number[]|null); + + /** FileDescriptorProto weakDependency */ + weakDependency?: (number[]|null); + + /** FileDescriptorProto messageType */ + messageType?: (google.protobuf.IDescriptorProto[]|null); + + /** FileDescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** FileDescriptorProto service */ + service?: (google.protobuf.IServiceDescriptorProto[]|null); + + /** FileDescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** FileDescriptorProto options */ + options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo */ + sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax */ + syntax?: (string|null); + + /** FileDescriptorProto edition */ + edition?: (string|null); + } + + /** Represents a FileDescriptorProto. */ + class FileDescriptorProto implements IFileDescriptorProto { + + /** + * Constructs a new FileDescriptorProto. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileDescriptorProto); + + /** FileDescriptorProto name. */ + public name: string; + + /** FileDescriptorProto package. */ + public package: string; + + /** FileDescriptorProto dependency. */ + public dependency: string[]; + + /** FileDescriptorProto publicDependency. */ + public publicDependency: number[]; + + /** FileDescriptorProto weakDependency. */ + public weakDependency: number[]; + + /** FileDescriptorProto messageType. */ + public messageType: google.protobuf.IDescriptorProto[]; + + /** FileDescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** FileDescriptorProto service. */ + public service: google.protobuf.IServiceDescriptorProto[]; + + /** FileDescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** FileDescriptorProto options. */ + public options?: (google.protobuf.IFileOptions|null); + + /** FileDescriptorProto sourceCodeInfo. */ + public sourceCodeInfo?: (google.protobuf.ISourceCodeInfo|null); + + /** FileDescriptorProto syntax. */ + public syntax: string; + + /** FileDescriptorProto edition. */ + public edition: string; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FileDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFileDescriptorProto): google.protobuf.FileDescriptorProto; + + /** + * Encodes the specified FileDescriptorProto message. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @param message FileDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileDescriptorProto; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileDescriptorProto; + + /** + * Verifies a FileDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileDescriptorProto; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. 
+ * @param message FileDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a DescriptorProto. */ + interface IDescriptorProto { + + /** DescriptorProto name */ + name?: (string|null); + + /** DescriptorProto field */ + field?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto extension */ + extension?: (google.protobuf.IFieldDescriptorProto[]|null); + + /** DescriptorProto nestedType */ + nestedType?: (google.protobuf.IDescriptorProto[]|null); + + /** DescriptorProto enumType */ + enumType?: (google.protobuf.IEnumDescriptorProto[]|null); + + /** DescriptorProto extensionRange */ + extensionRange?: (google.protobuf.DescriptorProto.IExtensionRange[]|null); + + /** DescriptorProto oneofDecl */ + oneofDecl?: (google.protobuf.IOneofDescriptorProto[]|null); + + /** DescriptorProto options */ + options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange */ + reservedRange?: (google.protobuf.DescriptorProto.IReservedRange[]|null); + + /** DescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents a DescriptorProto. */ + class DescriptorProto implements IDescriptorProto { + + /** + * Constructs a new DescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDescriptorProto); + + /** DescriptorProto name. */ + public name: string; + + /** DescriptorProto field. 
*/ + public field: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto extension. */ + public extension: google.protobuf.IFieldDescriptorProto[]; + + /** DescriptorProto nestedType. */ + public nestedType: google.protobuf.IDescriptorProto[]; + + /** DescriptorProto enumType. */ + public enumType: google.protobuf.IEnumDescriptorProto[]; + + /** DescriptorProto extensionRange. */ + public extensionRange: google.protobuf.DescriptorProto.IExtensionRange[]; + + /** DescriptorProto oneofDecl. */ + public oneofDecl: google.protobuf.IOneofDescriptorProto[]; + + /** DescriptorProto options. */ + public options?: (google.protobuf.IMessageOptions|null); + + /** DescriptorProto reservedRange. */ + public reservedRange: google.protobuf.DescriptorProto.IReservedRange[]; + + /** DescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns DescriptorProto instance + */ + public static create(properties?: google.protobuf.IDescriptorProto): google.protobuf.DescriptorProto; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
+ * @param message DescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto; + + /** + * Verifies a DescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. 
+ * @param message DescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace DescriptorProto { + + /** Properties of an ExtensionRange. */ + interface IExtensionRange { + + /** ExtensionRange start */ + start?: (number|null); + + /** ExtensionRange end */ + end?: (number|null); + + /** ExtensionRange options */ + options?: (google.protobuf.IExtensionRangeOptions|null); + } + + /** Represents an ExtensionRange. */ + class ExtensionRange implements IExtensionRange { + + /** + * Constructs a new ExtensionRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IExtensionRange); + + /** ExtensionRange start. */ + public start: number; + + /** ExtensionRange end. */ + public end: number; + + /** ExtensionRange options. */ + public options?: (google.protobuf.IExtensionRangeOptions|null); + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IExtensionRange): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. 
+ * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @param message ExtensionRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IExtensionRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Verifies an ExtensionRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ExtensionRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ExtensionRange; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @param message ExtensionRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ExtensionRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExtensionRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReservedRange. */ + interface IReservedRange { + + /** ReservedRange start */ + start?: (number|null); + + /** ReservedRange end */ + end?: (number|null); + } + + /** Represents a ReservedRange. */ + class ReservedRange implements IReservedRange { + + /** + * Constructs a new ReservedRange. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.DescriptorProto.IReservedRange); + + /** ReservedRange start. */ + public start: number; + + /** ReservedRange end. */ + public end: number; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns ReservedRange instance + */ + public static create(properties?: google.protobuf.DescriptorProto.IReservedRange): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
+ * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @param message ReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.DescriptorProto.IReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Verifies a ReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DescriptorProto.ReservedRange; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @param message ReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DescriptorProto.ReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReservedRange to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReservedRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of an ExtensionRangeOptions. */ + interface IExtensionRangeOptions { + + /** ExtensionRangeOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an ExtensionRangeOptions. */ + class ExtensionRangeOptions implements IExtensionRangeOptions { + + /** + * Constructs a new ExtensionRangeOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IExtensionRangeOptions); + + /** ExtensionRangeOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns ExtensionRangeOptions instance + */ + public static create(properties?: google.protobuf.IExtensionRangeOptions): google.protobuf.ExtensionRangeOptions; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. 
+ * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @param message ExtensionRangeOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IExtensionRangeOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions; + + /** + * Verifies an ExtensionRangeOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExtensionRangeOptions message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns ExtensionRangeOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions; + + /** + * Creates a plain object from an ExtensionRangeOptions message. Also converts values to other types if specified. + * @param message ExtensionRangeOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExtensionRangeOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FieldDescriptorProto. 
*/ + interface IFieldDescriptorProto { + + /** FieldDescriptorProto name */ + name?: (string|null); + + /** FieldDescriptorProto number */ + number?: (number|null); + + /** FieldDescriptorProto label */ + label?: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label|null); + + /** FieldDescriptorProto type */ + type?: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type|null); + + /** FieldDescriptorProto typeName */ + typeName?: (string|null); + + /** FieldDescriptorProto extendee */ + extendee?: (string|null); + + /** FieldDescriptorProto defaultValue */ + defaultValue?: (string|null); + + /** FieldDescriptorProto oneofIndex */ + oneofIndex?: (number|null); + + /** FieldDescriptorProto jsonName */ + jsonName?: (string|null); + + /** FieldDescriptorProto options */ + options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional */ + proto3Optional?: (boolean|null); + } + + /** Represents a FieldDescriptorProto. */ + class FieldDescriptorProto implements IFieldDescriptorProto { + + /** + * Constructs a new FieldDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldDescriptorProto); + + /** FieldDescriptorProto name. */ + public name: string; + + /** FieldDescriptorProto number. */ + public number: number; + + /** FieldDescriptorProto label. */ + public label: (google.protobuf.FieldDescriptorProto.Label|keyof typeof google.protobuf.FieldDescriptorProto.Label); + + /** FieldDescriptorProto type. */ + public type: (google.protobuf.FieldDescriptorProto.Type|keyof typeof google.protobuf.FieldDescriptorProto.Type); + + /** FieldDescriptorProto typeName. */ + public typeName: string; + + /** FieldDescriptorProto extendee. */ + public extendee: string; + + /** FieldDescriptorProto defaultValue. */ + public defaultValue: string; + + /** FieldDescriptorProto oneofIndex. 
*/ + public oneofIndex: number; + + /** FieldDescriptorProto jsonName. */ + public jsonName: string; + + /** FieldDescriptorProto options. */ + public options?: (google.protobuf.IFieldOptions|null); + + /** FieldDescriptorProto proto3Optional. */ + public proto3Optional: boolean; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldDescriptorProto instance + */ + public static create(properties?: google.protobuf.IFieldDescriptorProto): google.protobuf.FieldDescriptorProto; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @param message FieldDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldDescriptorProto; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldDescriptorProto; + + /** + * Verifies a FieldDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldDescriptorProto; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. + * @param message FieldDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldDescriptorProto to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace FieldDescriptorProto { + + /** Type enum. */ + enum Type { + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + TYPE_SINT32 = 17, + TYPE_SINT64 = 18 + } + + /** Label enum. */ + enum Label { + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3 + } + } + + /** Properties of an OneofDescriptorProto. */ + interface IOneofDescriptorProto { + + /** OneofDescriptorProto name */ + name?: (string|null); + + /** OneofDescriptorProto options */ + options?: (google.protobuf.IOneofOptions|null); + } + + /** Represents an OneofDescriptorProto. */ + class OneofDescriptorProto implements IOneofDescriptorProto { + + /** + * Constructs a new OneofDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofDescriptorProto); + + /** OneofDescriptorProto name. */ + public name: string; + + /** OneofDescriptorProto options. */ + public options?: (google.protobuf.IOneofOptions|null); + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofDescriptorProto instance + */ + public static create(properties?: google.protobuf.IOneofDescriptorProto): google.protobuf.OneofDescriptorProto; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
+ * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @param message OneofDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofDescriptorProto; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofDescriptorProto; + + /** + * Verifies an OneofDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns OneofDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofDescriptorProto; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @param message OneofDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for OneofDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an EnumDescriptorProto. */ + interface IEnumDescriptorProto { + + /** EnumDescriptorProto name */ + name?: (string|null); + + /** EnumDescriptorProto value */ + value?: (google.protobuf.IEnumValueDescriptorProto[]|null); + + /** EnumDescriptorProto options */ + options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange */ + reservedRange?: (google.protobuf.EnumDescriptorProto.IEnumReservedRange[]|null); + + /** EnumDescriptorProto reservedName */ + reservedName?: (string[]|null); + } + + /** Represents an EnumDescriptorProto. */ + class EnumDescriptorProto implements IEnumDescriptorProto { + + /** + * Constructs a new EnumDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumDescriptorProto); + + /** EnumDescriptorProto name. */ + public name: string; + + /** EnumDescriptorProto value. */ + public value: google.protobuf.IEnumValueDescriptorProto[]; + + /** EnumDescriptorProto options. 
*/ + public options?: (google.protobuf.IEnumOptions|null); + + /** EnumDescriptorProto reservedRange. */ + public reservedRange: google.protobuf.EnumDescriptorProto.IEnumReservedRange[]; + + /** EnumDescriptorProto reservedName. */ + public reservedName: string[]; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumDescriptorProto): google.protobuf.EnumDescriptorProto; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. + * @param message EnumDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto; + + /** + * Verifies an EnumDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @param message EnumDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace EnumDescriptorProto { + + /** Properties of an EnumReservedRange. */ + interface IEnumReservedRange { + + /** EnumReservedRange start */ + start?: (number|null); + + /** EnumReservedRange end */ + end?: (number|null); + } + + /** Represents an EnumReservedRange. */ + class EnumReservedRange implements IEnumReservedRange { + + /** + * Constructs a new EnumReservedRange. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange); + + /** EnumReservedRange start. */ + public start: number; + + /** EnumReservedRange end. */ + public end: number; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumReservedRange instance + */ + public static create(properties?: google.protobuf.EnumDescriptorProto.IEnumReservedRange): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @param message EnumReservedRange message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.EnumDescriptorProto.IEnumReservedRange, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Verifies an EnumReservedRange message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumReservedRange + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumDescriptorProto.EnumReservedRange; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @param message EnumReservedRange + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumDescriptorProto.EnumReservedRange, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumReservedRange to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumReservedRange + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of an EnumValueDescriptorProto. */ + interface IEnumValueDescriptorProto { + + /** EnumValueDescriptorProto name */ + name?: (string|null); + + /** EnumValueDescriptorProto number */ + number?: (number|null); + + /** EnumValueDescriptorProto options */ + options?: (google.protobuf.IEnumValueOptions|null); + } + + /** Represents an EnumValueDescriptorProto. */ + class EnumValueDescriptorProto implements IEnumValueDescriptorProto { + + /** + * Constructs a new EnumValueDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueDescriptorProto); + + /** EnumValueDescriptorProto name. */ + public name: string; + + /** EnumValueDescriptorProto number. */ + public number: number; + + /** EnumValueDescriptorProto options. */ + public options?: (google.protobuf.IEnumValueOptions|null); + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueDescriptorProto instance + */ + public static create(properties?: google.protobuf.IEnumValueDescriptorProto): google.protobuf.EnumValueDescriptorProto; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
+ * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @param message EnumValueDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueDescriptorProto; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueDescriptorProto; + + /** + * Verifies an EnumValueDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueDescriptorProto message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns EnumValueDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueDescriptorProto; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. + * @param message EnumValueDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumValueDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ServiceDescriptorProto. */ + interface IServiceDescriptorProto { + + /** ServiceDescriptorProto name */ + name?: (string|null); + + /** ServiceDescriptorProto method */ + method?: (google.protobuf.IMethodDescriptorProto[]|null); + + /** ServiceDescriptorProto options */ + options?: (google.protobuf.IServiceOptions|null); + } + + /** Represents a ServiceDescriptorProto. */ + class ServiceDescriptorProto implements IServiceDescriptorProto { + + /** + * Constructs a new ServiceDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceDescriptorProto); + + /** ServiceDescriptorProto name. */ + public name: string; + + /** ServiceDescriptorProto method. */ + public method: google.protobuf.IMethodDescriptorProto[]; + + /** ServiceDescriptorProto options. */ + public options?: (google.protobuf.IServiceOptions|null); + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ServiceDescriptorProto instance + */ + public static create(properties?: google.protobuf.IServiceDescriptorProto): google.protobuf.ServiceDescriptorProto; + + /** + * Encodes the specified ServiceDescriptorProto message. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @param message ServiceDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceDescriptorProto; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceDescriptorProto; + + /** + * Verifies a ServiceDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceDescriptorProto; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. + * @param message ServiceDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ServiceDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MethodDescriptorProto. 
*/ + interface IMethodDescriptorProto { + + /** MethodDescriptorProto name */ + name?: (string|null); + + /** MethodDescriptorProto inputType */ + inputType?: (string|null); + + /** MethodDescriptorProto outputType */ + outputType?: (string|null); + + /** MethodDescriptorProto options */ + options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming */ + clientStreaming?: (boolean|null); + + /** MethodDescriptorProto serverStreaming */ + serverStreaming?: (boolean|null); + } + + /** Represents a MethodDescriptorProto. */ + class MethodDescriptorProto implements IMethodDescriptorProto { + + /** + * Constructs a new MethodDescriptorProto. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodDescriptorProto); + + /** MethodDescriptorProto name. */ + public name: string; + + /** MethodDescriptorProto inputType. */ + public inputType: string; + + /** MethodDescriptorProto outputType. */ + public outputType: string; + + /** MethodDescriptorProto options. */ + public options?: (google.protobuf.IMethodOptions|null); + + /** MethodDescriptorProto clientStreaming. */ + public clientStreaming: boolean; + + /** MethodDescriptorProto serverStreaming. */ + public serverStreaming: boolean; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodDescriptorProto instance + */ + public static create(properties?: google.protobuf.IMethodDescriptorProto): google.protobuf.MethodDescriptorProto; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. 
+ * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @param message MethodDescriptorProto message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodDescriptorProto, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodDescriptorProto; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodDescriptorProto; + + /** + * Verifies a MethodDescriptorProto message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodDescriptorProto message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodDescriptorProto + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodDescriptorProto; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. + * @param message MethodDescriptorProto + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodDescriptorProto, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodDescriptorProto to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MethodDescriptorProto + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FileOptions. 
*/ + interface IFileOptions { + + /** FileOptions javaPackage */ + javaPackage?: (string|null); + + /** FileOptions javaOuterClassname */ + javaOuterClassname?: (string|null); + + /** FileOptions javaMultipleFiles */ + javaMultipleFiles?: (boolean|null); + + /** FileOptions javaGenerateEqualsAndHash */ + javaGenerateEqualsAndHash?: (boolean|null); + + /** FileOptions javaStringCheckUtf8 */ + javaStringCheckUtf8?: (boolean|null); + + /** FileOptions optimizeFor */ + optimizeFor?: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode|null); + + /** FileOptions goPackage */ + goPackage?: (string|null); + + /** FileOptions ccGenericServices */ + ccGenericServices?: (boolean|null); + + /** FileOptions javaGenericServices */ + javaGenericServices?: (boolean|null); + + /** FileOptions pyGenericServices */ + pyGenericServices?: (boolean|null); + + /** FileOptions phpGenericServices */ + phpGenericServices?: (boolean|null); + + /** FileOptions deprecated */ + deprecated?: (boolean|null); + + /** FileOptions ccEnableArenas */ + ccEnableArenas?: (boolean|null); + + /** FileOptions objcClassPrefix */ + objcClassPrefix?: (string|null); + + /** FileOptions csharpNamespace */ + csharpNamespace?: (string|null); + + /** FileOptions swiftPrefix */ + swiftPrefix?: (string|null); + + /** FileOptions phpClassPrefix */ + phpClassPrefix?: (string|null); + + /** FileOptions phpNamespace */ + phpNamespace?: (string|null); + + /** FileOptions phpMetadataNamespace */ + phpMetadataNamespace?: (string|null); + + /** FileOptions rubyPackage */ + rubyPackage?: (string|null); + + /** FileOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FileOptions .google.api.resourceDefinition */ + ".google.api.resourceDefinition"?: (google.api.IResourceDescriptor[]|null); + } + + /** Represents a FileOptions. */ + class FileOptions implements IFileOptions { + + /** + * Constructs a new FileOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFileOptions); + + /** FileOptions javaPackage. */ + public javaPackage: string; + + /** FileOptions javaOuterClassname. */ + public javaOuterClassname: string; + + /** FileOptions javaMultipleFiles. */ + public javaMultipleFiles: boolean; + + /** FileOptions javaGenerateEqualsAndHash. */ + public javaGenerateEqualsAndHash: boolean; + + /** FileOptions javaStringCheckUtf8. */ + public javaStringCheckUtf8: boolean; + + /** FileOptions optimizeFor. */ + public optimizeFor: (google.protobuf.FileOptions.OptimizeMode|keyof typeof google.protobuf.FileOptions.OptimizeMode); + + /** FileOptions goPackage. */ + public goPackage: string; + + /** FileOptions ccGenericServices. */ + public ccGenericServices: boolean; + + /** FileOptions javaGenericServices. */ + public javaGenericServices: boolean; + + /** FileOptions pyGenericServices. */ + public pyGenericServices: boolean; + + /** FileOptions phpGenericServices. */ + public phpGenericServices: boolean; + + /** FileOptions deprecated. */ + public deprecated: boolean; + + /** FileOptions ccEnableArenas. */ + public ccEnableArenas: boolean; + + /** FileOptions objcClassPrefix. */ + public objcClassPrefix: string; + + /** FileOptions csharpNamespace. */ + public csharpNamespace: string; + + /** FileOptions swiftPrefix. */ + public swiftPrefix: string; + + /** FileOptions phpClassPrefix. */ + public phpClassPrefix: string; + + /** FileOptions phpNamespace. */ + public phpNamespace: string; + + /** FileOptions phpMetadataNamespace. */ + public phpMetadataNamespace: string; + + /** FileOptions rubyPackage. */ + public rubyPackage: string; + + /** FileOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FileOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FileOptions instance + */ + public static create(properties?: google.protobuf.IFileOptions): google.protobuf.FileOptions; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @param message FileOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFileOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FileOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FileOptions; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FileOptions; + + /** + * Verifies a FileOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FileOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FileOptions; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. + * @param message FileOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FileOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FileOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FileOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace FileOptions { + + /** OptimizeMode enum. */ + enum OptimizeMode { + SPEED = 1, + CODE_SIZE = 2, + LITE_RUNTIME = 3 + } + } + + /** Properties of a MessageOptions. */ + interface IMessageOptions { + + /** MessageOptions messageSetWireFormat */ + messageSetWireFormat?: (boolean|null); + + /** MessageOptions noStandardDescriptorAccessor */ + noStandardDescriptorAccessor?: (boolean|null); + + /** MessageOptions deprecated */ + deprecated?: (boolean|null); + + /** MessageOptions mapEntry */ + mapEntry?: (boolean|null); + + /** MessageOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MessageOptions .google.api.resource */ + ".google.api.resource"?: (google.api.IResourceDescriptor|null); + } + + /** Represents a MessageOptions. 
*/ + class MessageOptions implements IMessageOptions { + + /** + * Constructs a new MessageOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMessageOptions); + + /** MessageOptions messageSetWireFormat. */ + public messageSetWireFormat: boolean; + + /** MessageOptions noStandardDescriptorAccessor. */ + public noStandardDescriptorAccessor: boolean; + + /** MessageOptions deprecated. */ + public deprecated: boolean; + + /** MessageOptions mapEntry. */ + public mapEntry: boolean; + + /** MessageOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MessageOptions instance + */ + public static create(properties?: google.protobuf.IMessageOptions): google.protobuf.MessageOptions; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. + * @param message MessageOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMessageOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MessageOptions; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MessageOptions; + + /** + * Verifies a MessageOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MessageOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MessageOptions; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. + * @param message MessageOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MessageOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MessageOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MessageOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FieldOptions. */ + interface IFieldOptions { + + /** FieldOptions ctype */ + ctype?: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType|null); + + /** FieldOptions packed */ + packed?: (boolean|null); + + /** FieldOptions jstype */ + jstype?: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType|null); + + /** FieldOptions lazy */ + lazy?: (boolean|null); + + /** FieldOptions unverifiedLazy */ + unverifiedLazy?: (boolean|null); + + /** FieldOptions deprecated */ + deprecated?: (boolean|null); + + /** FieldOptions weak */ + weak?: (boolean|null); + + /** FieldOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** FieldOptions .google.cloud.bigquery.storage.v1.columnName */ + ".google.cloud.bigquery.storage.v1.columnName"?: (string|null); + + /** FieldOptions .google.api.fieldBehavior */ + ".google.api.fieldBehavior"?: (google.api.FieldBehavior[]|null); + + /** FieldOptions .google.api.resourceReference */ + ".google.api.resourceReference"?: (google.api.IResourceReference|null); + } + + /** Represents a FieldOptions. */ + class FieldOptions implements IFieldOptions { + + /** + * Constructs a new FieldOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldOptions); + + /** FieldOptions ctype. */ + public ctype: (google.protobuf.FieldOptions.CType|keyof typeof google.protobuf.FieldOptions.CType); + + /** FieldOptions packed. */ + public packed: boolean; + + /** FieldOptions jstype. 
*/ + public jstype: (google.protobuf.FieldOptions.JSType|keyof typeof google.protobuf.FieldOptions.JSType); + + /** FieldOptions lazy. */ + public lazy: boolean; + + /** FieldOptions unverifiedLazy. */ + public unverifiedLazy: boolean; + + /** FieldOptions deprecated. */ + public deprecated: boolean; + + /** FieldOptions weak. */ + public weak: boolean; + + /** FieldOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldOptions instance + */ + public static create(properties?: google.protobuf.IFieldOptions): google.protobuf.FieldOptions; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @param message FieldOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions; + + /** + * Verifies a FieldOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. + * @param message FieldOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace FieldOptions { + + /** CType enum. */ + enum CType { + STRING = 0, + CORD = 1, + STRING_PIECE = 2 + } + + /** JSType enum. */ + enum JSType { + JS_NORMAL = 0, + JS_STRING = 1, + JS_NUMBER = 2 + } + } + + /** Properties of an OneofOptions. */ + interface IOneofOptions { + + /** OneofOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an OneofOptions. */ + class OneofOptions implements IOneofOptions { + + /** + * Constructs a new OneofOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IOneofOptions); + + /** OneofOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns OneofOptions instance + */ + public static create(properties?: google.protobuf.IOneofOptions): google.protobuf.OneofOptions; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @param message OneofOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IOneofOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.OneofOptions; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.OneofOptions; + + /** + * Verifies an OneofOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns OneofOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.OneofOptions; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
+ * @param message OneofOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.OneofOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this OneofOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for OneofOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an EnumOptions. */ + interface IEnumOptions { + + /** EnumOptions allowAlias */ + allowAlias?: (boolean|null); + + /** EnumOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumOptions. */ + class EnumOptions implements IEnumOptions { + + /** + * Constructs a new EnumOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumOptions); + + /** EnumOptions allowAlias. */ + public allowAlias: boolean; + + /** EnumOptions deprecated. */ + public deprecated: boolean; + + /** EnumOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumOptions instance + */ + public static create(properties?: google.protobuf.IEnumOptions): google.protobuf.EnumOptions; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @param message EnumOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumOptions; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumOptions; + + /** + * Verifies an EnumOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns EnumOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumOptions; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. + * @param message EnumOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an EnumValueOptions. */ + interface IEnumValueOptions { + + /** EnumValueOptions deprecated */ + deprecated?: (boolean|null); + + /** EnumValueOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + } + + /** Represents an EnumValueOptions. */ + class EnumValueOptions implements IEnumValueOptions { + + /** + * Constructs a new EnumValueOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEnumValueOptions); + + /** EnumValueOptions deprecated. */ + public deprecated: boolean; + + /** EnumValueOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns EnumValueOptions instance + */ + public static create(properties?: google.protobuf.IEnumValueOptions): google.protobuf.EnumValueOptions; + + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @param message EnumValueOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEnumValueOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.EnumValueOptions; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.EnumValueOptions; + + /** + * Verifies an EnumValueOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns EnumValueOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.EnumValueOptions; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. + * @param message EnumValueOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.EnumValueOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EnumValueOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EnumValueOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ServiceOptions. */ + interface IServiceOptions { + + /** ServiceOptions deprecated */ + deprecated?: (boolean|null); + + /** ServiceOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ServiceOptions .google.api.defaultHost */ + ".google.api.defaultHost"?: (string|null); + + /** ServiceOptions .google.api.oauthScopes */ + ".google.api.oauthScopes"?: (string|null); + } + + /** Represents a ServiceOptions. */ + class ServiceOptions implements IServiceOptions { + + /** + * Constructs a new ServiceOptions. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IServiceOptions); + + /** ServiceOptions deprecated. */ + public deprecated: boolean; + + /** ServiceOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new ServiceOptions instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ServiceOptions instance + */ + public static create(properties?: google.protobuf.IServiceOptions): google.protobuf.ServiceOptions; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @param message ServiceOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IServiceOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ServiceOptions; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ServiceOptions; + + /** + * Verifies a ServiceOptions message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ServiceOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ServiceOptions; + + /** + * Creates a plain object from a ServiceOptions message. Also converts values to other types if specified. + * @param message ServiceOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ServiceOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ServiceOptions to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ServiceOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MethodOptions. */ + interface IMethodOptions { + + /** MethodOptions deprecated */ + deprecated?: (boolean|null); + + /** MethodOptions idempotencyLevel */ + idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); + + /** MethodOptions uninterpretedOption */ + uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** MethodOptions .google.api.http */ + ".google.api.http"?: (google.api.IHttpRule|null); + + /** MethodOptions .google.api.methodSignature */ + ".google.api.methodSignature"?: (string[]|null); + } + + /** Represents a MethodOptions. */ + class MethodOptions implements IMethodOptions { + + /** + * Constructs a new MethodOptions. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IMethodOptions); + + /** MethodOptions deprecated. */ + public deprecated: boolean; + + /** MethodOptions idempotencyLevel. */ + public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); + + /** MethodOptions uninterpretedOption. */ + public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodOptions instance + */ + public static create(properties?: google.protobuf.IMethodOptions): google.protobuf.MethodOptions; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. + * @param message MethodOptions message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IMethodOptions, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.MethodOptions; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.MethodOptions; + + /** + * Verifies a MethodOptions message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodOptions + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.MethodOptions; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @param message MethodOptions + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.MethodOptions, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodOptions to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MethodOptions + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace MethodOptions { + + /** IdempotencyLevel enum. */ + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + NO_SIDE_EFFECTS = 1, + IDEMPOTENT = 2 + } + } + + /** Properties of an UninterpretedOption. */ + interface IUninterpretedOption { + + /** UninterpretedOption name */ + name?: (google.protobuf.UninterpretedOption.INamePart[]|null); + + /** UninterpretedOption identifierValue */ + identifierValue?: (string|null); + + /** UninterpretedOption positiveIntValue */ + positiveIntValue?: (number|Long|string|null); + + /** UninterpretedOption negativeIntValue */ + negativeIntValue?: (number|Long|string|null); + + /** UninterpretedOption doubleValue */ + doubleValue?: (number|null); + + /** UninterpretedOption stringValue */ + stringValue?: (Uint8Array|string|null); + + /** UninterpretedOption aggregateValue */ + aggregateValue?: (string|null); + } + + /** Represents an UninterpretedOption. */ + class UninterpretedOption implements IUninterpretedOption { + + /** + * Constructs a new UninterpretedOption. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUninterpretedOption); + + /** UninterpretedOption name. */ + public name: google.protobuf.UninterpretedOption.INamePart[]; + + /** UninterpretedOption identifierValue. */ + public identifierValue: string; + + /** UninterpretedOption positiveIntValue. */ + public positiveIntValue: (number|Long|string); + + /** UninterpretedOption negativeIntValue. */ + public negativeIntValue: (number|Long|string); + + /** UninterpretedOption doubleValue. */ + public doubleValue: number; + + /** UninterpretedOption stringValue. 
*/ + public stringValue: (Uint8Array|string); + + /** UninterpretedOption aggregateValue. */ + public aggregateValue: string; + + /** + * Creates a new UninterpretedOption instance using the specified properties. + * @param [properties] Properties to set + * @returns UninterpretedOption instance + */ + public static create(properties?: google.protobuf.IUninterpretedOption): google.protobuf.UninterpretedOption; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @param message UninterpretedOption message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUninterpretedOption, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption; + + /** + * Verifies an UninterpretedOption message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UninterpretedOption + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @param message UninterpretedOption + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UninterpretedOption to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UninterpretedOption + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace UninterpretedOption { + + /** Properties of a NamePart. */ + interface INamePart { + + /** NamePart namePart */ + namePart: string; + + /** NamePart isExtension */ + isExtension: boolean; + } + + /** Represents a NamePart. */ + class NamePart implements INamePart { + + /** + * Constructs a new NamePart. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.UninterpretedOption.INamePart); + + /** NamePart namePart. */ + public namePart: string; + + /** NamePart isExtension. */ + public isExtension: boolean; + + /** + * Creates a new NamePart instance using the specified properties. + * @param [properties] Properties to set + * @returns NamePart instance + */ + public static create(properties?: google.protobuf.UninterpretedOption.INamePart): google.protobuf.UninterpretedOption.NamePart; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NamePart message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @param message NamePart message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.UninterpretedOption.INamePart, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UninterpretedOption.NamePart; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UninterpretedOption.NamePart; + + /** + * Verifies a NamePart message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NamePart + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UninterpretedOption.NamePart; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @param message NamePart + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UninterpretedOption.NamePart, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NamePart to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for NamePart + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of a SourceCodeInfo. */ + interface ISourceCodeInfo { + + /** SourceCodeInfo location */ + location?: (google.protobuf.SourceCodeInfo.ILocation[]|null); + } + + /** Represents a SourceCodeInfo. */ + class SourceCodeInfo implements ISourceCodeInfo { + + /** + * Constructs a new SourceCodeInfo. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ISourceCodeInfo); + + /** SourceCodeInfo location. */ + public location: google.protobuf.SourceCodeInfo.ILocation[]; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SourceCodeInfo instance + */ + public static create(properties?: google.protobuf.ISourceCodeInfo): google.protobuf.SourceCodeInfo; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @param message SourceCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ISourceCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo; + + /** + * Verifies a SourceCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SourceCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @param message SourceCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SourceCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SourceCodeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace SourceCodeInfo { + + /** Properties of a Location. 
*/ + interface ILocation { + + /** Location path */ + path?: (number[]|null); + + /** Location span */ + span?: (number[]|null); + + /** Location leadingComments */ + leadingComments?: (string|null); + + /** Location trailingComments */ + trailingComments?: (string|null); + + /** Location leadingDetachedComments */ + leadingDetachedComments?: (string[]|null); + } + + /** Represents a Location. */ + class Location implements ILocation { + + /** + * Constructs a new Location. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.SourceCodeInfo.ILocation); + + /** Location path. */ + public path: number[]; + + /** Location span. */ + public span: number[]; + + /** Location leadingComments. */ + public leadingComments: string; + + /** Location trailingComments. */ + public trailingComments: string; + + /** Location leadingDetachedComments. */ + public leadingDetachedComments: string[]; + + /** + * Creates a new Location instance using the specified properties. + * @param [properties] Properties to set + * @returns Location instance + */ + public static create(properties?: google.protobuf.SourceCodeInfo.ILocation): google.protobuf.SourceCodeInfo.Location; + + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. + * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @param message Location message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.SourceCodeInfo.ILocation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Location message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.SourceCodeInfo.Location; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.SourceCodeInfo.Location; + + /** + * Verifies a Location message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Location + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.SourceCodeInfo.Location; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. 
+ * @param message Location + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.SourceCodeInfo.Location, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Location to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Location + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Properties of a GeneratedCodeInfo. */ + interface IGeneratedCodeInfo { + + /** GeneratedCodeInfo annotation */ + annotation?: (google.protobuf.GeneratedCodeInfo.IAnnotation[]|null); + } + + /** Represents a GeneratedCodeInfo. */ + class GeneratedCodeInfo implements IGeneratedCodeInfo { + + /** + * Constructs a new GeneratedCodeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IGeneratedCodeInfo); + + /** GeneratedCodeInfo annotation. */ + public annotation: google.protobuf.GeneratedCodeInfo.IAnnotation[]; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns GeneratedCodeInfo instance + */ + public static create(properties?: google.protobuf.IGeneratedCodeInfo): google.protobuf.GeneratedCodeInfo; + + /** + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. 
Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @param message GeneratedCodeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IGeneratedCodeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo; + + /** + * Verifies a GeneratedCodeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GeneratedCodeInfo + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
+ * @param message GeneratedCodeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for GeneratedCodeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace GeneratedCodeInfo { + + /** Properties of an Annotation. */ + interface IAnnotation { + + /** Annotation path */ + path?: (number[]|null); + + /** Annotation sourceFile */ + sourceFile?: (string|null); + + /** Annotation begin */ + begin?: (number|null); + + /** Annotation end */ + end?: (number|null); + + /** Annotation semantic */ + semantic?: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null); + } + + /** Represents an Annotation. */ + class Annotation implements IAnnotation { + + /** + * Constructs a new Annotation. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation); + + /** Annotation path. */ + public path: number[]; + + /** Annotation sourceFile. */ + public sourceFile: string; + + /** Annotation begin. */ + public begin: number; + + /** Annotation end. */ + public end: number; + + /** Annotation semantic. */ + public semantic: (google.protobuf.GeneratedCodeInfo.Annotation.Semantic|keyof typeof google.protobuf.GeneratedCodeInfo.Annotation.Semantic); + + /** + * Creates a new Annotation instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns Annotation instance + */ + public static create(properties?: google.protobuf.GeneratedCodeInfo.IAnnotation): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @param message Annotation message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.GeneratedCodeInfo.IAnnotation, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Verifies an Annotation message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Annotation + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.GeneratedCodeInfo.Annotation; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @param message Annotation + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.GeneratedCodeInfo.Annotation, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Annotation to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Annotation + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace Annotation { + + /** Semantic enum. */ + enum Semantic { + NONE = 0, + SET = 1, + ALIAS = 2 + } + } + } + + /** Properties of a Timestamp. */ + interface ITimestamp { + + /** Timestamp seconds */ + seconds?: (number|Long|string|null); + + /** Timestamp nanos */ + nanos?: (number|null); + } + + /** Represents a Timestamp. */ + class Timestamp implements ITimestamp { + + /** + * Constructs a new Timestamp. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ITimestamp); + + /** Timestamp seconds. */ + public seconds: (number|Long|string); + + /** Timestamp nanos. */ + public nanos: number; + + /** + * Creates a new Timestamp instance using the specified properties. + * @param [properties] Properties to set + * @returns Timestamp instance + */ + public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; + + /** + * Verifies a Timestamp message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Timestamp + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @param message Timestamp + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Timestamp to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Timestamp + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a DoubleValue. */ + interface IDoubleValue { + + /** DoubleValue value */ + value?: (number|null); + } + + /** Represents a DoubleValue. */ + class DoubleValue implements IDoubleValue { + + /** + * Constructs a new DoubleValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDoubleValue); + + /** DoubleValue value. */ + public value: number; + + /** + * Creates a new DoubleValue instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns DoubleValue instance + */ + public static create(properties?: google.protobuf.IDoubleValue): google.protobuf.DoubleValue; + + /** + * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @param message DoubleValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @param message DoubleValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDoubleValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DoubleValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.DoubleValue; + + /** + * Decodes a DoubleValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.DoubleValue; + + /** + * Verifies a DoubleValue message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DoubleValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.DoubleValue; + + /** + * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. + * @param message DoubleValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.DoubleValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DoubleValue to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DoubleValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FloatValue. */ + interface IFloatValue { + + /** FloatValue value */ + value?: (number|null); + } + + /** Represents a FloatValue. */ + class FloatValue implements IFloatValue { + + /** + * Constructs a new FloatValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFloatValue); + + /** FloatValue value. */ + public value: number; + + /** + * Creates a new FloatValue instance using the specified properties. + * @param [properties] Properties to set + * @returns FloatValue instance + */ + public static create(properties?: google.protobuf.IFloatValue): google.protobuf.FloatValue; + + /** + * Encodes the specified FloatValue message. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. 
+ * @param message FloatValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @param message FloatValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFloatValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FloatValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FloatValue; + + /** + * Decodes a FloatValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FloatValue; + + /** + * Verifies a FloatValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns FloatValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FloatValue; + + /** + * Creates a plain object from a FloatValue message. Also converts values to other types if specified. + * @param message FloatValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FloatValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FloatValue to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FloatValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an Int64Value. */ + interface IInt64Value { + + /** Int64Value value */ + value?: (number|Long|string|null); + } + + /** Represents an Int64Value. */ + class Int64Value implements IInt64Value { + + /** + * Constructs a new Int64Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IInt64Value); + + /** Int64Value value. */ + public value: (number|Long|string); + + /** + * Creates a new Int64Value instance using the specified properties. + * @param [properties] Properties to set + * @returns Int64Value instance + */ + public static create(properties?: google.protobuf.IInt64Value): google.protobuf.Int64Value; + + /** + * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @param message Int64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Int64Value message, length delimited. 
Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @param message Int64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Int64Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int64Value; + + /** + * Decodes an Int64Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int64Value; + + /** + * Verifies an Int64Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Int64Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Int64Value; + + /** + * Creates a plain object from an Int64Value message. Also converts values to other types if specified. 
+ * @param message Int64Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Int64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Int64Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Int64Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a UInt64Value. */ + interface IUInt64Value { + + /** UInt64Value value */ + value?: (number|Long|string|null); + } + + /** Represents a UInt64Value. */ + class UInt64Value implements IUInt64Value { + + /** + * Constructs a new UInt64Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUInt64Value); + + /** UInt64Value value. */ + public value: (number|Long|string); + + /** + * Creates a new UInt64Value instance using the specified properties. + * @param [properties] Properties to set + * @returns UInt64Value instance + */ + public static create(properties?: google.protobuf.IUInt64Value): google.protobuf.UInt64Value; + + /** + * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. + * @param message UInt64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. 
+ * @param message UInt64Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUInt64Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a UInt64Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UInt64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt64Value; + + /** + * Decodes a UInt64Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UInt64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt64Value; + + /** + * Verifies a UInt64Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UInt64Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UInt64Value; + + /** + * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. 
+ * @param message UInt64Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UInt64Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UInt64Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UInt64Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an Int32Value. */ + interface IInt32Value { + + /** Int32Value value */ + value?: (number|null); + } + + /** Represents an Int32Value. */ + class Int32Value implements IInt32Value { + + /** + * Constructs a new Int32Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IInt32Value); + + /** Int32Value value. */ + public value: number; + + /** + * Creates a new Int32Value instance using the specified properties. + * @param [properties] Properties to set + * @returns Int32Value instance + */ + public static create(properties?: google.protobuf.IInt32Value): google.protobuf.Int32Value; + + /** + * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. + * @param message Int32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. 
+ * @param message Int32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Int32Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Int32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Int32Value; + + /** + * Decodes an Int32Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Int32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Int32Value; + + /** + * Verifies an Int32Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Int32Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Int32Value; + + /** + * Creates a plain object from an Int32Value message. Also converts values to other types if specified. 
+ * @param message Int32Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Int32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Int32Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Int32Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a UInt32Value. */ + interface IUInt32Value { + + /** UInt32Value value */ + value?: (number|null); + } + + /** Represents a UInt32Value. */ + class UInt32Value implements IUInt32Value { + + /** + * Constructs a new UInt32Value. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IUInt32Value); + + /** UInt32Value value. */ + public value: number; + + /** + * Creates a new UInt32Value instance using the specified properties. + * @param [properties] Properties to set + * @returns UInt32Value instance + */ + public static create(properties?: google.protobuf.IUInt32Value): google.protobuf.UInt32Value; + + /** + * Encodes the specified UInt32Value message. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. + * @param message UInt32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. 
+ * @param message UInt32Value message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IUInt32Value, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a UInt32Value message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.UInt32Value; + + /** + * Decodes a UInt32Value message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.UInt32Value; + + /** + * Verifies a UInt32Value message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UInt32Value + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.UInt32Value; + + /** + * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. 
+ * @param message UInt32Value + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.UInt32Value, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UInt32Value to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UInt32Value + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BoolValue. */ + interface IBoolValue { + + /** BoolValue value */ + value?: (boolean|null); + } + + /** Represents a BoolValue. */ + class BoolValue implements IBoolValue { + + /** + * Constructs a new BoolValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IBoolValue); + + /** BoolValue value. */ + public value: boolean; + + /** + * Creates a new BoolValue instance using the specified properties. + * @param [properties] Properties to set + * @returns BoolValue instance + */ + public static create(properties?: google.protobuf.IBoolValue): google.protobuf.BoolValue; + + /** + * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. + * @param message BoolValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. 
+ * @param message BoolValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IBoolValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BoolValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BoolValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BoolValue; + + /** + * Decodes a BoolValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BoolValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BoolValue; + + /** + * Verifies a BoolValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BoolValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.BoolValue; + + /** + * Creates a plain object from a BoolValue message. Also converts values to other types if specified. + * @param message BoolValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.BoolValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BoolValue to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BoolValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StringValue. */ + interface IStringValue { + + /** StringValue value */ + value?: (string|null); + } + + /** Represents a StringValue. */ + class StringValue implements IStringValue { + + /** + * Constructs a new StringValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IStringValue); + + /** StringValue value. */ + public value: string; + + /** + * Creates a new StringValue instance using the specified properties. + * @param [properties] Properties to set + * @returns StringValue instance + */ + public static create(properties?: google.protobuf.IStringValue): google.protobuf.StringValue; + + /** + * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. + * @param message StringValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. + * @param message StringValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IStringValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StringValue message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StringValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.StringValue; + + /** + * Decodes a StringValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StringValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.StringValue; + + /** + * Verifies a StringValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StringValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StringValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.StringValue; + + /** + * Creates a plain object from a StringValue message. Also converts values to other types if specified. + * @param message StringValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.StringValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StringValue to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StringValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BytesValue. */ + interface IBytesValue { + + /** BytesValue value */ + value?: (Uint8Array|string|null); + } + + /** Represents a BytesValue. */ + class BytesValue implements IBytesValue { + + /** + * Constructs a new BytesValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IBytesValue); + + /** BytesValue value. */ + public value: (Uint8Array|string); + + /** + * Creates a new BytesValue instance using the specified properties. + * @param [properties] Properties to set + * @returns BytesValue instance + */ + public static create(properties?: google.protobuf.IBytesValue): google.protobuf.BytesValue; + + /** + * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. + * @param message BytesValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. + * @param message BytesValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IBytesValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BytesValue message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BytesValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.BytesValue; + + /** + * Decodes a BytesValue message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BytesValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.BytesValue; + + /** + * Verifies a BytesValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BytesValue + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.BytesValue; + + /** + * Creates a plain object from a BytesValue message. Also converts values to other types if specified. + * @param message BytesValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.BytesValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BytesValue to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BytesValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an Any. */ + interface IAny { + + /** Any type_url */ + type_url?: (string|null); + + /** Any value */ + value?: (Uint8Array|string|null); + } + + /** Represents an Any. */ + class Any implements IAny { + + /** + * Constructs a new Any. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IAny); + + /** Any type_url. */ + public type_url: string; + + /** Any value. */ + public value: (Uint8Array|string); + + /** + * Creates a new Any instance using the specified properties. + * @param [properties] Properties to set + * @returns Any instance + */ + public static create(properties?: google.protobuf.IAny): google.protobuf.Any; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @param message Any message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IAny, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Any message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Any; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Any; + + /** + * Verifies an Any message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Any + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Any; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. + * @param message Any + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Any, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Any to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Any + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an Empty. */ + interface IEmpty { + } + + /** Represents an Empty. 
*/ + class Empty implements IEmpty { + + /** + * Constructs a new Empty. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IEmpty); + + /** + * Creates a new Empty instance using the specified properties. + * @param [properties] Properties to set + * @returns Empty instance + */ + public static create(properties?: google.protobuf.IEmpty): google.protobuf.Empty; + + /** + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @param message Empty message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IEmpty, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an Empty message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Empty; + + /** + * Decodes an Empty message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Empty; + + /** + * Verifies an Empty message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Empty + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Empty; + + /** + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @param message Empty + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Empty, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Empty to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Empty + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Namespace api. */ + namespace api { + + /** Properties of a Http. */ + interface IHttp { + + /** Http rules */ + rules?: (google.api.IHttpRule[]|null); + + /** Http fullyDecodeReservedExpansion */ + fullyDecodeReservedExpansion?: (boolean|null); + } + + /** Represents a Http. */ + class Http implements IHttp { + + /** + * Constructs a new Http. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttp); + + /** Http rules. 
*/ + public rules: google.api.IHttpRule[]; + + /** Http fullyDecodeReservedExpansion. */ + public fullyDecodeReservedExpansion: boolean; + + /** + * Creates a new Http instance using the specified properties. + * @param [properties] Properties to set + * @returns Http instance + */ + public static create(properties?: google.api.IHttp): google.api.Http; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @param message Http message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Http message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Http; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Http; + + /** + * Verifies a Http message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Http + */ + public static fromObject(object: { [k: string]: any }): google.api.Http; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. + * @param message Http + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Http, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Http to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Http + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a HttpRule. */ + interface IHttpRule { + + /** HttpRule selector */ + selector?: (string|null); + + /** HttpRule get */ + get?: (string|null); + + /** HttpRule put */ + put?: (string|null); + + /** HttpRule post */ + post?: (string|null); + + /** HttpRule delete */ + "delete"?: (string|null); + + /** HttpRule patch */ + patch?: (string|null); + + /** HttpRule custom */ + custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body */ + body?: (string|null); + + /** HttpRule responseBody */ + responseBody?: (string|null); + + /** HttpRule additionalBindings */ + additionalBindings?: (google.api.IHttpRule[]|null); + } + + /** Represents a HttpRule. */ + class HttpRule implements IHttpRule { + + /** + * Constructs a new HttpRule. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IHttpRule); + + /** HttpRule selector. 
*/ + public selector: string; + + /** HttpRule get. */ + public get?: (string|null); + + /** HttpRule put. */ + public put?: (string|null); + + /** HttpRule post. */ + public post?: (string|null); + + /** HttpRule delete. */ + public delete?: (string|null); + + /** HttpRule patch. */ + public patch?: (string|null); + + /** HttpRule custom. */ + public custom?: (google.api.ICustomHttpPattern|null); + + /** HttpRule body. */ + public body: string; + + /** HttpRule responseBody. */ + public responseBody: string; + + /** HttpRule additionalBindings. */ + public additionalBindings: google.api.IHttpRule[]; + + /** HttpRule pattern. */ + public pattern?: ("get"|"put"|"post"|"delete"|"patch"|"custom"); + + /** + * Creates a new HttpRule instance using the specified properties. + * @param [properties] Properties to set + * @returns HttpRule instance + */ + public static create(properties?: google.api.IHttpRule): google.api.HttpRule; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @param message HttpRule message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IHttpRule, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HttpRule message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.HttpRule; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.HttpRule; + + /** + * Verifies a HttpRule message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HttpRule + */ + public static fromObject(object: { [k: string]: any }): google.api.HttpRule; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @param message HttpRule + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.HttpRule, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HttpRule to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for HttpRule + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a CustomHttpPattern. 
*/ + interface ICustomHttpPattern { + + /** CustomHttpPattern kind */ + kind?: (string|null); + + /** CustomHttpPattern path */ + path?: (string|null); + } + + /** Represents a CustomHttpPattern. */ + class CustomHttpPattern implements ICustomHttpPattern { + + /** + * Constructs a new CustomHttpPattern. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICustomHttpPattern); + + /** CustomHttpPattern kind. */ + public kind: string; + + /** CustomHttpPattern path. */ + public path: string; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @param [properties] Properties to set + * @returns CustomHttpPattern instance + */ + public static create(properties?: google.api.ICustomHttpPattern): google.api.CustomHttpPattern; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @param message CustomHttpPattern message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICustomHttpPattern, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CustomHttpPattern; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CustomHttpPattern; + + /** + * Verifies a CustomHttpPattern message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CustomHttpPattern + */ + public static fromObject(object: { [k: string]: any }): google.api.CustomHttpPattern; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @param message CustomHttpPattern + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CustomHttpPattern, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CustomHttpPattern to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CustomHttpPattern + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** FieldBehavior enum. */ + enum FieldBehavior { + FIELD_BEHAVIOR_UNSPECIFIED = 0, + OPTIONAL = 1, + REQUIRED = 2, + OUTPUT_ONLY = 3, + INPUT_ONLY = 4, + IMMUTABLE = 5, + UNORDERED_LIST = 6, + NON_EMPTY_DEFAULT = 7 + } + + /** Properties of a ResourceDescriptor. */ + interface IResourceDescriptor { + + /** ResourceDescriptor type */ + type?: (string|null); + + /** ResourceDescriptor pattern */ + pattern?: (string[]|null); + + /** ResourceDescriptor nameField */ + nameField?: (string|null); + + /** ResourceDescriptor history */ + history?: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History|null); + + /** ResourceDescriptor plural */ + plural?: (string|null); + + /** ResourceDescriptor singular */ + singular?: (string|null); + + /** ResourceDescriptor style */ + style?: (google.api.ResourceDescriptor.Style[]|null); + } + + /** Represents a ResourceDescriptor. */ + class ResourceDescriptor implements IResourceDescriptor { + + /** + * Constructs a new ResourceDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceDescriptor); + + /** ResourceDescriptor type. */ + public type: string; + + /** ResourceDescriptor pattern. */ + public pattern: string[]; + + /** ResourceDescriptor nameField. */ + public nameField: string; + + /** ResourceDescriptor history. */ + public history: (google.api.ResourceDescriptor.History|keyof typeof google.api.ResourceDescriptor.History); + + /** ResourceDescriptor plural. */ + public plural: string; + + /** ResourceDescriptor singular. */ + public singular: string; + + /** ResourceDescriptor style. 
*/ + public style: google.api.ResourceDescriptor.Style[]; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceDescriptor instance + */ + public static create(properties?: google.api.IResourceDescriptor): google.api.ResourceDescriptor; + + /** + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. + * @param message ResourceDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceDescriptor; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceDescriptor; + + /** + * Verifies a ResourceDescriptor message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceDescriptor + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceDescriptor; + + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. + * @param message ResourceDescriptor + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceDescriptor to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ResourceDescriptor + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace ResourceDescriptor { + + /** History enum. */ + enum History { + HISTORY_UNSPECIFIED = 0, + ORIGINALLY_SINGLE_PATTERN = 1, + FUTURE_MULTI_PATTERN = 2 + } + + /** Style enum. */ + enum Style { + STYLE_UNSPECIFIED = 0, + DECLARATIVE_FRIENDLY = 1 + } + } + + /** Properties of a ResourceReference. 
*/ + interface IResourceReference { + + /** ResourceReference type */ + type?: (string|null); + + /** ResourceReference childType */ + childType?: (string|null); + } + + /** Represents a ResourceReference. */ + class ResourceReference implements IResourceReference { + + /** + * Constructs a new ResourceReference. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IResourceReference); + + /** ResourceReference type. */ + public type: string; + + /** ResourceReference childType. */ + public childType: string; + + /** + * Creates a new ResourceReference instance using the specified properties. + * @param [properties] Properties to set + * @returns ResourceReference instance + */ + public static create(properties?: google.api.IResourceReference): google.api.ResourceReference; + + /** + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @param message ResourceReference message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IResourceReference, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ResourceReference message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ResourceReference; + + /** + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ResourceReference; + + /** + * Verifies a ResourceReference message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ResourceReference + */ + public static fromObject(object: { [k: string]: any }): google.api.ResourceReference; + + /** + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @param message ResourceReference + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ResourceReference, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ResourceReference to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ResourceReference + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** Namespace rpc. */ + namespace rpc { + + /** Properties of a Status. */ + interface IStatus { + + /** Status code */ + code?: (number|null); + + /** Status message */ + message?: (string|null); + + /** Status details */ + details?: (google.protobuf.IAny[]|null); + } + + /** Represents a Status. */ + class Status implements IStatus { + + /** + * Constructs a new Status. + * @param [properties] Properties to set + */ + constructor(properties?: google.rpc.IStatus); + + /** Status code. */ + public code: number; + + /** Status message. */ + public message: string; + + /** Status details. */ + public details: google.protobuf.IAny[]; + + /** + * Creates a new Status instance using the specified properties. + * @param [properties] Properties to set + * @returns Status instance + */ + public static create(properties?: google.rpc.IStatus): google.rpc.Status; + + /** + * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. + * @param message Status message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. + * @param message Status message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.rpc.IStatus, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Status message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.rpc.Status; + + /** + * Decodes a Status message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.rpc.Status; + + /** + * Verifies a Status message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Status message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Status + */ + public static fromObject(object: { [k: string]: any }): google.rpc.Status; + + /** + * Creates a plain object from a Status message. Also converts values to other types if specified. + * @param message Status + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.rpc.Status, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Status to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Status + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } +} diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js new file mode 100644 index 00000000000..53da7298f1a --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.js @@ -0,0 +1,29973 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/ +(function(global, factory) { /* global define, require, module */ + + /* AMD */ if (typeof define === 'function' && define.amd) + define(["protobufjs/minimal"], factory); + + /* CommonJS */ else if (typeof require === 'function' && typeof module === 'object' && module && module.exports) + module.exports = factory(require("google-gax/build/src/protobuf").protobufMinimal); + +})(this, function($protobuf) { + "use strict"; + + // Common aliases + var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + + // Exported root namespace + var $root = $protobuf.roots._google_cloud_bigquery_storage_protos || ($protobuf.roots._google_cloud_bigquery_storage_protos = {}); + + $root.google = (function() { + + /** + * Namespace google. + * @exports google + * @namespace + */ + var google = {}; + + google.cloud = (function() { + + /** + * Namespace cloud. + * @memberof google + * @namespace + */ + var cloud = {}; + + cloud.bigquery = (function() { + + /** + * Namespace bigquery. + * @memberof google.cloud + * @namespace + */ + var bigquery = {}; + + bigquery.storage = (function() { + + /** + * Namespace storage. + * @memberof google.cloud.bigquery + * @namespace + */ + var storage = {}; + + storage.v1 = (function() { + + /** + * Namespace v1. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1 = {}; + + v1.ArrowSchema = (function() { + + /** + * Properties of an ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowSchema + * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema + */ + + /** + * Constructs a new ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowSchema. 
+ * @implements IArrowSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set + */ + function ArrowSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSchema serializedSchema. + * @member {Uint8Array} serializedSchema + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @instance + */ + ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); + + /** + * Creates a new ArrowSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema instance + */ + ArrowSchema.create = function create(properties) { + return new ArrowSchema(properties); + }; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); + return writer; + }; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedSchema = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) + return "serializedSchema: buffer expected"; + return null; + }; + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowSchema} ArrowSchema + */ + ArrowSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); + if (object.serializedSchema != null) + if (typeof object.serializedSchema === "string") + $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); + else if (object.serializedSchema.length >= 0) + message.serializedSchema = object.serializedSchema; + return message; + }; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowSchema} message ArrowSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if (options.bytes === String) + object.serializedSchema = ""; + else { + object.serializedSchema = []; + if (options.bytes !== Array) + object.serializedSchema = $util.newBuffer(object.serializedSchema); + } + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; + return object; + }; + + /** + * Converts this ArrowSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @instance + * @returns {Object.} JSON object + */ + ArrowSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSchema"; + }; + + return ArrowSchema; + })(); + + v1.ArrowRecordBatch = (function() { + + /** + * Properties of an ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowRecordBatch + * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch + * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount + */ + + /** + * Constructs a new ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowRecordBatch. + * @implements IArrowRecordBatch + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set + */ + function ArrowRecordBatch(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowRecordBatch serializedRecordBatch. + * @member {Uint8Array} serializedRecordBatch + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); + + /** + * ArrowRecordBatch rowCount. 
+ * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch instance + */ + ArrowRecordBatch.create = function create(properties) { + return new ArrowRecordBatch(properties); + }; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedRecordBatch = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowRecordBatch message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowRecordBatch.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) + return "serializedRecordBatch: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowRecordBatch} ArrowRecordBatch + */ + ArrowRecordBatch.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); + if (object.serializedRecordBatch != null) + if (typeof object.serializedRecordBatch === "string") + $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); + else if (object.serializedRecordBatch.length >= 0) + message.serializedRecordBatch = object.serializedRecordBatch; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowRecordBatch} message ArrowRecordBatch + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowRecordBatch.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedRecordBatch = ""; + else { + object.serializedRecordBatch = []; + if (options.bytes !== Array) + object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ArrowRecordBatch to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @instance + * @returns {Object.} JSON object + */ + ArrowRecordBatch.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowRecordBatch + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowRecordBatch + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowRecordBatch"; + }; + + return ArrowRecordBatch; + })(); + + v1.ArrowSerializationOptions = (function() { + + /** + * Properties of an ArrowSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IArrowSerializationOptions + * @property {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null} [bufferCompression] ArrowSerializationOptions bufferCompression + */ + + /** + * Constructs a new ArrowSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an ArrowSerializationOptions. + * @implements IArrowSerializationOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set + */ + function ArrowSerializationOptions(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSerializationOptions bufferCompression. 
+ * @member {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec} bufferCompression + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @instance + */ + ArrowSerializationOptions.prototype.bufferCompression = 0; + + /** + * Creates a new ArrowSerializationOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions instance + */ + ArrowSerializationOptions.create = function create(properties) { + return new ArrowSerializationOptions(properties); + }; + + /** + * Encodes the specified ArrowSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSerializationOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.bufferCompression != null && Object.hasOwnProperty.call(message, "bufferCompression")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.bufferCompression); + return writer; + }; + + /** + * Encodes the specified ArrowSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IArrowSerializationOptions} message ArrowSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSerializationOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + message.bufferCompression = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSerializationOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSerializationOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSerializationOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSerializationOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) + switch (message.bufferCompression) { + default: + return "bufferCompression: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates an ArrowSerializationOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} ArrowSerializationOptions + */ + ArrowSerializationOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); + switch (object.bufferCompression) { + default: + if (typeof object.bufferCompression === "number") { + message.bufferCompression = object.bufferCompression; + break; + } + break; + case "COMPRESSION_UNSPECIFIED": + case 0: + message.bufferCompression = 0; + break; + case "LZ4_FRAME": + case 1: + message.bufferCompression = 1; + break; + case "ZSTD": + case 2: + message.bufferCompression = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from an ArrowSerializationOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ArrowSerializationOptions} message ArrowSerializationOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSerializationOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.bufferCompression = options.enums === String ? "COMPRESSION_UNSPECIFIED" : 0; + if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) + object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] === undefined ? 
message.bufferCompression : $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; + return object; + }; + + /** + * Converts this ArrowSerializationOptions to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @instance + * @returns {Object.} JSON object + */ + ArrowSerializationOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowSerializationOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ArrowSerializationOptions"; + }; + + /** + * CompressionCodec enum. + * @name google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec + * @enum {number} + * @property {number} COMPRESSION_UNSPECIFIED=0 COMPRESSION_UNSPECIFIED value + * @property {number} LZ4_FRAME=1 LZ4_FRAME value + * @property {number} ZSTD=2 ZSTD value + */ + ArrowSerializationOptions.CompressionCodec = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "COMPRESSION_UNSPECIFIED"] = 0; + values[valuesById[1] = "LZ4_FRAME"] = 1; + values[valuesById[2] = "ZSTD"] = 2; + return values; + })(); + + return ArrowSerializationOptions; + })(); + + v1.AvroSchema = (function() { + + /** + * Properties of an AvroSchema. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroSchema + * @property {string|null} [schema] AvroSchema schema + */ + + /** + * Constructs a new AvroSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroSchema. + * @implements IAvroSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set + */ + function AvroSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSchema schema. + * @member {string} schema + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @instance + */ + AvroSchema.prototype.schema = ""; + + /** + * Creates a new AvroSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema instance + */ + AvroSchema.create = function create(properties) { + return new AvroSchema(properties); + }; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); + return writer; + }; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.schema = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.schema != null && message.hasOwnProperty("schema")) + if (!$util.isString(message.schema)) + return "schema: string expected"; + return null; + }; + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroSchema} AvroSchema + */ + AvroSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); + if (object.schema != null) + message.schema = String(object.schema); + return message; + }; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1.AvroSchema} message AvroSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.schema = ""; + if (message.schema != null && message.hasOwnProperty("schema")) + object.schema = message.schema; + return object; + }; + + /** + * Converts this AvroSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @instance + * @returns {Object.} JSON object + */ + AvroSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AvroSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSchema"; + }; + + return AvroSchema; + })(); + + v1.AvroRows = (function() { + + /** + * Properties of an AvroRows. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroRows + * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows + * @property {number|Long|null} [rowCount] AvroRows rowCount + */ + + /** + * Constructs a new AvroRows. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroRows. + * @implements IAvroRows + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set + */ + function AvroRows(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroRows serializedBinaryRows. + * @member {Uint8Array} serializedBinaryRows + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + */ + AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); + + /** + * AvroRows rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + */ + AvroRows.prototype.rowCount = $util.Long ? 
$util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new AvroRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows instance + */ + AvroRows.create = function create(properties) { + return new AvroRows(properties); + }; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroRows.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedBinaryRows = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroRows message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) + return "serializedBinaryRows: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroRows} AvroRows + */ + AvroRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); + if (object.serializedBinaryRows != null) + if (typeof object.serializedBinaryRows === "string") + $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); + else if (object.serializedBinaryRows.length >= 0) + message.serializedBinaryRows = object.serializedBinaryRows; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1.AvroRows} message AvroRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedBinaryRows = ""; + else { + object.serializedBinaryRows = []; + if (options.bytes !== Array) + object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this AvroRows to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @instance + * @returns {Object.} JSON object + */ + AvroRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AvroRows + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroRows"; + }; + + return AvroRows; + })(); + + v1.AvroSerializationOptions = (function() { + + /** + * Properties of an AvroSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IAvroSerializationOptions + * @property {boolean|null} [enableDisplayNameAttribute] AvroSerializationOptions enableDisplayNameAttribute + */ + + /** + * Constructs a new AvroSerializationOptions. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AvroSerializationOptions. + * @implements IAvroSerializationOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set + */ + function AvroSerializationOptions(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSerializationOptions enableDisplayNameAttribute. 
+ * @member {boolean} enableDisplayNameAttribute + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @instance + */ + AvroSerializationOptions.prototype.enableDisplayNameAttribute = false; + + /** + * Creates a new AvroSerializationOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions instance + */ + AvroSerializationOptions.create = function create(properties) { + return new AvroSerializationOptions(properties); + }; + + /** + * Encodes the specified AvroSerializationOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSerializationOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.enableDisplayNameAttribute != null && Object.hasOwnProperty.call(message, "enableDisplayNameAttribute")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.enableDisplayNameAttribute); + return writer; + }; + + /** + * Encodes the specified AvroSerializationOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.IAvroSerializationOptions} message AvroSerializationOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSerializationOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSerializationOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.enableDisplayNameAttribute = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSerializationOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSerializationOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSerializationOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSerializationOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) + if (typeof message.enableDisplayNameAttribute !== "boolean") + return "enableDisplayNameAttribute: boolean expected"; + return null; + }; + + /** + * Creates an AvroSerializationOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AvroSerializationOptions} AvroSerializationOptions + */ + AvroSerializationOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); + if (object.enableDisplayNameAttribute != null) + message.enableDisplayNameAttribute = Boolean(object.enableDisplayNameAttribute); + return message; + }; + + /** + * Creates a plain object from an AvroSerializationOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {google.cloud.bigquery.storage.v1.AvroSerializationOptions} message AvroSerializationOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSerializationOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.enableDisplayNameAttribute = false; + if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) + object.enableDisplayNameAttribute = message.enableDisplayNameAttribute; + return object; + }; + + /** + * Converts this AvroSerializationOptions to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @instance + * @returns {Object.} JSON object + */ + AvroSerializationOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AvroSerializationOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSerializationOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSerializationOptions"; + }; + + return AvroSerializationOptions; + })(); + + v1.ProtoSchema = (function() { + + /** + * Properties of a ProtoSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IProtoSchema + * @property {google.protobuf.IDescriptorProto|null} [protoDescriptor] ProtoSchema protoDescriptor + */ + + /** + * Constructs a new ProtoSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ProtoSchema. + * @implements IProtoSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set + */ + function ProtoSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ProtoSchema protoDescriptor. + * @member {google.protobuf.IDescriptorProto|null|undefined} protoDescriptor + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @instance + */ + ProtoSchema.prototype.protoDescriptor = null; + + /** + * Creates a new ProtoSchema instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema instance + */ + ProtoSchema.create = function create(properties) { + return new ProtoSchema(properties); + }; + + /** + * Encodes the specified ProtoSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.protoDescriptor != null && Object.hasOwnProperty.call(message, "protoDescriptor")) + $root.google.protobuf.DescriptorProto.encode(message.protoDescriptor, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ProtoSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoSchema} message ProtoSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ProtoSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ProtoSchema message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ProtoSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) { + var error = $root.google.protobuf.DescriptorProto.verify(message.protoDescriptor); + if (error) + return "protoDescriptor." + error; + } + return null; + }; + + /** + * Creates a ProtoSchema message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ProtoSchema} ProtoSchema + */ + ProtoSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); + if (object.protoDescriptor != null) { + if (typeof object.protoDescriptor !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ProtoSchema.protoDescriptor: object expected"); + message.protoDescriptor = $root.google.protobuf.DescriptorProto.fromObject(object.protoDescriptor); + } + return message; + }; + + /** + * Creates a plain object from a ProtoSchema message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ProtoSchema} message ProtoSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ProtoSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.protoDescriptor = null; + if (message.protoDescriptor != null && message.hasOwnProperty("protoDescriptor")) + object.protoDescriptor = $root.google.protobuf.DescriptorProto.toObject(message.protoDescriptor, options); + return object; + }; + + /** + * Converts this ProtoSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @instance + * @returns {Object.} JSON object + */ + ProtoSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ProtoSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ProtoSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoSchema"; + }; + + return ProtoSchema; + })(); + + v1.ProtoRows = (function() { + + /** + * Properties of a ProtoRows. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IProtoRows + * @property {Array.|null} [serializedRows] ProtoRows serializedRows + */ + + /** + * Constructs a new ProtoRows. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ProtoRows. 
+ * @implements IProtoRows + * @constructor + * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set + */ + function ProtoRows(properties) { + this.serializedRows = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ProtoRows serializedRows. + * @member {Array.} serializedRows + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @instance + */ + ProtoRows.prototype.serializedRows = $util.emptyArray; + + /** + * Creates a new ProtoRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows instance + */ + ProtoRows.create = function create(properties) { + return new ProtoRows(properties); + }; + + /** + * Encodes the specified ProtoRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRows != null && message.serializedRows.length) + for (var i = 0; i < message.serializedRows.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRows[i]); + return writer; + }; + + /** + * Encodes the specified ProtoRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ProtoRows.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.IProtoRows} message ProtoRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ProtoRows message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.serializedRows && message.serializedRows.length)) + message.serializedRows = []; + message.serializedRows.push(reader.bytes()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ProtoRows message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ProtoRows message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ProtoRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRows != null && message.hasOwnProperty("serializedRows")) { + if (!Array.isArray(message.serializedRows)) + return "serializedRows: array expected"; + for (var i = 0; i < message.serializedRows.length; ++i) + if (!(message.serializedRows[i] && typeof message.serializedRows[i].length === "number" || $util.isString(message.serializedRows[i]))) + return "serializedRows: buffer[] expected"; + } + return null; + }; + + /** + * Creates a ProtoRows message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ProtoRows} ProtoRows + */ + ProtoRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ProtoRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); + if (object.serializedRows) { + if (!Array.isArray(object.serializedRows)) + throw TypeError(".google.cloud.bigquery.storage.v1.ProtoRows.serializedRows: array expected"); + message.serializedRows = []; + for (var i = 0; i < object.serializedRows.length; ++i) + if (typeof object.serializedRows[i] === "string") + $util.base64.decode(object.serializedRows[i], message.serializedRows[i] = $util.newBuffer($util.base64.length(object.serializedRows[i])), 0); + else if (object.serializedRows[i].length >= 0) + message.serializedRows[i] = object.serializedRows[i]; + } + return message; + }; + + /** + * Creates a plain object from a ProtoRows message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {google.cloud.bigquery.storage.v1.ProtoRows} message ProtoRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ProtoRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.serializedRows = []; + if (message.serializedRows && message.serializedRows.length) { + object.serializedRows = []; + for (var j = 0; j < message.serializedRows.length; ++j) + object.serializedRows[j] = options.bytes === String ? $util.base64.encode(message.serializedRows[j], 0, message.serializedRows[j].length) : options.bytes === Array ? 
Array.prototype.slice.call(message.serializedRows[j]) : message.serializedRows[j]; + } + return object; + }; + + /** + * Converts this ProtoRows to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @instance + * @returns {Object.} JSON object + */ + ProtoRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ProtoRows + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ProtoRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ProtoRows"; + }; + + return ProtoRows; + })(); + + v1.BigQueryRead = (function() { + + /** + * Constructs a new BigQueryRead service. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BigQueryRead + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryRead(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryRead.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryRead; + + /** + * Creates new BigQueryRead service using the specified rpc implementation. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryRead} RPC service. Useful where requests and/or responses are streamed. + */ + BigQueryRead.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|createReadSession}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef CreateReadSessionCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.ReadSession} [response] ReadSession + */ + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.createReadSession = function createReadSession(request, callback) { + return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1.ReadSession, request, callback); + }, "name", { value: "CreateReadSession" }); + + /** + * Calls CreateReadSession. 
+ * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|readRows}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef ReadRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} [response] ReadRowsResponse + */ + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.readRows = function readRows(request, callback) { + return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1.ReadRowsResponse, request, callback); + }, "name", { value: "ReadRows" }); + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryRead|splitReadStream}. 
+ * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @typedef SplitReadStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} [response] SplitReadStreamResponse + */ + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryRead.prototype.splitReadStream = function splitReadStream(request, callback) { + return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse, request, callback); + }, "name", { value: "SplitReadStream" }); + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryRead + * @instance + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryRead; + })(); + + v1.BigQueryWrite = (function() { + + /** + * Constructs a new BigQueryWrite service. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BigQueryWrite + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryWrite(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryWrite.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryWrite; + + /** + * Creates new BigQueryWrite service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryWrite} RPC service. Useful where requests and/or responses are streamed. + */ + BigQueryWrite.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|createWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef CreateWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream + */ + + /** + * Calls CreateWriteStream. 
+ * @function createWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.createWriteStream = function createWriteStream(request, callback) { + return this.rpcCall(createWriteStream, $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); + }, "name", { value: "CreateWriteStream" }); + + /** + * Calls CreateWriteStream. + * @function createWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} request CreateWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|appendRows}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef AppendRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} [response] AppendRowsResponse + */ + + /** + * Calls AppendRows. 
+ * @function appendRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRowsCallback} callback Node-style callback called with the error, if any, and AppendRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.appendRows = function appendRows(request, callback) { + return this.rpcCall(appendRows, $root.google.cloud.bigquery.storage.v1.AppendRowsRequest, $root.google.cloud.bigquery.storage.v1.AppendRowsResponse, request, callback); + }, "name", { value: "AppendRows" }); + + /** + * Calls AppendRows. + * @function appendRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} request AppendRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|getWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef GetWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.WriteStream} [response] WriteStream + */ + + /** + * Calls GetWriteStream. 
+ * @function getWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStreamCallback} callback Node-style callback called with the error, if any, and WriteStream + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.getWriteStream = function getWriteStream(request, callback) { + return this.rpcCall(getWriteStream, $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.WriteStream, request, callback); + }, "name", { value: "GetWriteStream" }); + + /** + * Calls GetWriteStream. + * @function getWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} request GetWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|finalizeWriteStream}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef FinalizeWriteStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} [response] FinalizeWriteStreamResponse + */ + + /** + * Calls FinalizeWriteStream. 
+ * @function finalizeWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStreamCallback} callback Node-style callback called with the error, if any, and FinalizeWriteStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.finalizeWriteStream = function finalizeWriteStream(request, callback) { + return this.rpcCall(finalizeWriteStream, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest, $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse, request, callback); + }, "name", { value: "FinalizeWriteStream" }); + + /** + * Calls FinalizeWriteStream. + * @function finalizeWriteStream + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} request FinalizeWriteStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|batchCommitWriteStreams}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef BatchCommitWriteStreamsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} [response] BatchCommitWriteStreamsResponse + */ + + /** + * Calls BatchCommitWriteStreams. 
+ * @function batchCommitWriteStreams + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreamsCallback} callback Node-style callback called with the error, if any, and BatchCommitWriteStreamsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.batchCommitWriteStreams = function batchCommitWriteStreams(request, callback) { + return this.rpcCall(batchCommitWriteStreams, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest, $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse, request, callback); + }, "name", { value: "BatchCommitWriteStreams" }); + + /** + * Calls BatchCommitWriteStreams. + * @function batchCommitWriteStreams + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} request BatchCommitWriteStreamsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1.BigQueryWrite|flushRows}. + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @typedef FlushRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} [response] FlushRowsResponse + */ + + /** + * Calls FlushRows. 
+ * @function flushRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRowsCallback} callback Node-style callback called with the error, if any, and FlushRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryWrite.prototype.flushRows = function flushRows(request, callback) { + return this.rpcCall(flushRows, $root.google.cloud.bigquery.storage.v1.FlushRowsRequest, $root.google.cloud.bigquery.storage.v1.FlushRowsResponse, request, callback); + }, "name", { value: "FlushRows" }); + + /** + * Calls FlushRows. + * @function flushRows + * @memberof google.cloud.bigquery.storage.v1.BigQueryWrite + * @instance + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} request FlushRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryWrite; + })(); + + v1.CreateReadSessionRequest = (function() { + + /** + * Properties of a CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ICreateReadSessionRequest + * @property {string|null} [parent] CreateReadSessionRequest parent + * @property {google.cloud.bigquery.storage.v1.IReadSession|null} [readSession] CreateReadSessionRequest readSession + * @property {number|null} [maxStreamCount] CreateReadSessionRequest maxStreamCount + * @property {number|null} [preferredMinStreamCount] CreateReadSessionRequest preferredMinStreamCount + */ + + /** + * Constructs a new CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a CreateReadSessionRequest. 
+ * @implements ICreateReadSessionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set + */ + function CreateReadSessionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateReadSessionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.parent = ""; + + /** + * CreateReadSessionRequest readSession. + * @member {google.cloud.bigquery.storage.v1.IReadSession|null|undefined} readSession + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.readSession = null; + + /** + * CreateReadSessionRequest maxStreamCount. + * @member {number} maxStreamCount + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.maxStreamCount = 0; + + /** + * CreateReadSessionRequest preferredMinStreamCount. + * @member {number} preferredMinStreamCount + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.preferredMinStreamCount = 0; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest instance + */ + CreateReadSessionRequest.create = function create(properties) { + return new CreateReadSessionRequest(properties); + }; + + /** + * Encodes the specified CreateReadSessionRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.readSession != null && Object.hasOwnProperty.call(message, "readSession")) + $root.google.cloud.bigquery.storage.v1.ReadSession.encode(message.readSession, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.maxStreamCount != null && Object.hasOwnProperty.call(message, "maxStreamCount")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.maxStreamCount); + if (message.preferredMinStreamCount != null && Object.hasOwnProperty.call(message, "preferredMinStreamCount")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.preferredMinStreamCount); + return writer; + }; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateReadSessionRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.decode(reader, reader.uint32()); + break; + } + case 3: { + message.maxStreamCount = reader.int32(); + break; + } + case 4: { + message.preferredMinStreamCount = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateReadSessionRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateReadSessionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.readSession != null && message.hasOwnProperty("readSession")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.verify(message.readSession); + if (error) + return "readSession." + error; + } + if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + if (!$util.isInteger(message.maxStreamCount)) + return "maxStreamCount: integer expected"; + if (message.preferredMinStreamCount != null && message.hasOwnProperty("preferredMinStreamCount")) + if (!$util.isInteger(message.preferredMinStreamCount)) + return "preferredMinStreamCount: integer expected"; + return null; + }; + + /** + * Creates a CreateReadSessionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} CreateReadSessionRequest + */ + CreateReadSessionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.readSession != null) { + if (typeof object.readSession !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.CreateReadSessionRequest.readSession: object expected"); + message.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.fromObject(object.readSession); + } + if (object.maxStreamCount != null) + message.maxStreamCount = object.maxStreamCount | 0; + if (object.preferredMinStreamCount != null) + message.preferredMinStreamCount = object.preferredMinStreamCount | 0; + return message; + }; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1.CreateReadSessionRequest} message CreateReadSessionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateReadSessionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.readSession = null; + object.maxStreamCount = 0; + object.preferredMinStreamCount = 0; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.readSession != null && message.hasOwnProperty("readSession")) + object.readSession = $root.google.cloud.bigquery.storage.v1.ReadSession.toObject(message.readSession, options); + if (message.maxStreamCount != null && message.hasOwnProperty("maxStreamCount")) + object.maxStreamCount = message.maxStreamCount; + if (message.preferredMinStreamCount != null && message.hasOwnProperty("preferredMinStreamCount")) + object.preferredMinStreamCount = message.preferredMinStreamCount; + return object; + }; + + /** + * Converts this CreateReadSessionRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateReadSessionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.CreateReadSessionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateReadSessionRequest"; + }; + + return CreateReadSessionRequest; + })(); + + v1.ReadRowsRequest = (function() { + + /** + * Properties of a ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadRowsRequest + * @property {string|null} [readStream] ReadRowsRequest readStream + * @property {number|Long|null} [offset] ReadRowsRequest offset + */ + + /** + * Constructs a new ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadRowsRequest. + * @implements IReadRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set + */ + function ReadRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsRequest readStream. + * @member {string} readStream + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.readStream = ""; + + /** + * ReadRowsRequest offset. 
+ * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ReadRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest instance + */ + ReadRowsRequest.create = function create(properties) { + return new ReadRowsRequest(properties); + }; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.readStream != null && Object.hasOwnProperty.call(message, "readStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.readStream); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); + return writer; + }; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.readStream = reader.string(); + break; + } + case 2: { + message.offset = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.readStream != null && message.hasOwnProperty("readStream")) + if (!$util.isString(message.readStream)) + return "readStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadRowsRequest} ReadRowsRequest + */ + ReadRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); + if (object.readStream != null) + message.readStream = String(object.readStream); + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ReadRowsRequest} message ReadRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.readStream = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.offset = options.longs === String ? 
"0" : 0; + } + if (message.readStream != null && message.hasOwnProperty("readStream")) + object.readStream = message.readStream; + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; + + /** + * Converts this ReadRowsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @instance + * @returns {Object.} JSON object + */ + ReadRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsRequest"; + }; + + return ReadRowsRequest; + })(); + + v1.ThrottleState = (function() { + + /** + * Properties of a ThrottleState. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IThrottleState + * @property {number|null} [throttlePercent] ThrottleState throttlePercent + */ + + /** + * Constructs a new ThrottleState. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ThrottleState. 
+ * @implements IThrottleState + * @constructor + * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set + */ + function ThrottleState(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ThrottleState throttlePercent. + * @member {number} throttlePercent + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @instance + */ + ThrottleState.prototype.throttlePercent = 0; + + /** + * Creates a new ThrottleState instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState instance + */ + ThrottleState.create = function create(properties) { + return new ThrottleState(properties); + }; + + /** + * Encodes the specified ThrottleState message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleState.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + return writer; + }; + + /** + * Encodes the specified ThrottleState message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ThrottleState.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.IThrottleState} message ThrottleState message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleState.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ThrottleState message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleState.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.throttlePercent = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ThrottleState message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleState.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ThrottleState message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ThrottleState.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (!$util.isInteger(message.throttlePercent)) + return "throttlePercent: integer expected"; + return null; + }; + + /** + * Creates a ThrottleState message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ThrottleState} ThrottleState + */ + ThrottleState.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ThrottleState) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); + if (object.throttlePercent != null) + message.throttlePercent = object.throttlePercent | 0; + return message; + }; + + /** + * Creates a plain object from a ThrottleState message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {google.cloud.bigquery.storage.v1.ThrottleState} message ThrottleState + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ThrottleState.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.throttlePercent = 0; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + object.throttlePercent = message.throttlePercent; + return object; + }; + + /** + * Converts this ThrottleState to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @instance + * @returns {Object.} JSON object + */ + ThrottleState.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ThrottleState + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ThrottleState + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ThrottleState.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ThrottleState"; + }; + + return ThrottleState; + })(); + + v1.StreamStats = (function() { + + /** + * Properties of a StreamStats. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IStreamStats + * @property {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null} [progress] StreamStats progress + */ + + /** + * Constructs a new StreamStats. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a StreamStats. 
+ * @implements IStreamStats + * @constructor + * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set + */ + function StreamStats(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamStats progress. + * @member {google.cloud.bigquery.storage.v1.StreamStats.IProgress|null|undefined} progress + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @instance + */ + StreamStats.prototype.progress = null; + + /** + * Creates a new StreamStats instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats instance + */ + StreamStats.create = function create(properties) { + return new StreamStats(properties); + }; + + /** + * Encodes the specified StreamStats message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStats.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) + $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.encode(message.progress, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamStats message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.IStreamStats} message StreamStats message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStats.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamStats message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStats.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamStats message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStats.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamStats message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamStats.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.progress != null && message.hasOwnProperty("progress")) { + var error = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.verify(message.progress); + if (error) + return "progress." + error; + } + return null; + }; + + /** + * Creates a StreamStats message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.StreamStats} StreamStats + */ + StreamStats.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); + if (object.progress != null) { + if (typeof object.progress !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.StreamStats.progress: object expected"); + message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.fromObject(object.progress); + } + return message; + }; + + /** + * Creates a plain object from a StreamStats message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats} message StreamStats + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamStats.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.progress = null; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.toObject(message.progress, options); + return object; + }; + + /** + * Converts this StreamStats to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @instance + * @returns {Object.} JSON object + */ + StreamStats.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamStats + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamStats.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats"; + }; + + StreamStats.Progress = (function() { + + /** + * Properties of a Progress. + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @interface IProgress + * @property {number|null} [atResponseStart] Progress atResponseStart + * @property {number|null} [atResponseEnd] Progress atResponseEnd + */ + + /** + * Constructs a new Progress. + * @memberof google.cloud.bigquery.storage.v1.StreamStats + * @classdesc Represents a Progress. + * @implements IProgress + * @constructor + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set + */ + function Progress(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Progress atResponseStart. + * @member {number} atResponseStart + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + */ + Progress.prototype.atResponseStart = 0; + + /** + * Progress atResponseEnd. 
+ * @member {number} atResponseEnd + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + */ + Progress.prototype.atResponseEnd = 0; + + /** + * Creates a new Progress instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress instance + */ + Progress.create = function create(properties) { + return new Progress(properties); + }; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.atResponseStart); + if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.atResponseEnd); + return writer; + }; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StreamStats.Progress.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.atResponseStart = reader.double(); + break; + } + case 2: { + message.atResponseEnd = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Progress message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Progress.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (typeof message.atResponseStart !== "number") + return "atResponseStart: number expected"; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (typeof message.atResponseEnd !== "number") + return "atResponseEnd: number expected"; + return null; + }; + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.StreamStats.Progress} Progress + */ + Progress.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StreamStats.Progress) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); + if (object.atResponseStart != null) + message.atResponseStart = Number(object.atResponseStart); + if (object.atResponseEnd != null) + message.atResponseEnd = Number(object.atResponseEnd); + return message; + }; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {google.cloud.bigquery.storage.v1.StreamStats.Progress} message Progress + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Progress.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.atResponseStart = 0; + object.atResponseEnd = 0; + } + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + return object; + }; + + /** + * Converts this Progress to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @instance + * @returns {Object.} JSON object + */ + Progress.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Progress + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StreamStats.Progress + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StreamStats.Progress"; + }; + + return Progress; + })(); + + return StreamStats; + })(); + + v1.ReadRowsResponse = (function() { + + /** + * Properties of a ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadRowsResponse + * @property {google.cloud.bigquery.storage.v1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows + * @property {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch + * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount + * @property {google.cloud.bigquery.storage.v1.IStreamStats|null} [stats] ReadRowsResponse stats + * @property {google.cloud.bigquery.storage.v1.IThrottleState|null} [throttleState] ReadRowsResponse throttleState + * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema + */ + + /** + * Constructs a new ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadRowsResponse. 
+ * @implements IReadRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set + */ + function ReadRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsResponse avroRows. + * @member {google.cloud.bigquery.storage.v1.IAvroRows|null|undefined} avroRows + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroRows = null; + + /** + * ReadRowsResponse arrowRecordBatch. + * @member {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null|undefined} arrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowRecordBatch = null; + + /** + * ReadRowsResponse rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadRowsResponse stats. + * @member {google.cloud.bigquery.storage.v1.IStreamStats|null|undefined} stats + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.stats = null; + + /** + * ReadRowsResponse throttleState. + * @member {google.cloud.bigquery.storage.v1.IThrottleState|null|undefined} throttleState + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.throttleState = null; + + /** + * ReadRowsResponse avroSchema. + * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroSchema = null; + + /** + * ReadRowsResponse arrowSchema. 
+ * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowSchema = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadRowsResponse rows. + * @member {"avroRows"|"arrowRecordBatch"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * ReadRowsResponse schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse instance + */ + ReadRowsResponse.create = function create(properties) { + return new ReadRowsResponse(properties); + }; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stats != null && Object.hasOwnProperty.call(message, "stats")) + $root.google.cloud.bigquery.storage.v1.StreamStats.encode(message.stats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) + $root.google.cloud.bigquery.storage.v1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) + $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.throttleState != null && Object.hasOwnProperty.call(message, "throttleState")) + $root.google.cloud.bigquery.storage.v1.ThrottleState.encode(message.throttleState, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the 
specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadRowsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); + break; + } + case 4: { + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + } + case 6: { + message.rowCount = reader.int64(); + break; + } + case 2: { + message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.decode(reader, reader.uint32()); + break; + } + case 5: { + message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.decode(reader, reader.uint32()); + break; + } + case 7: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 8: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroRows.verify(message.avroRows); + if (error) + return "avroRows." + error; + } + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify(message.arrowRecordBatch); + if (error) + return "arrowRecordBatch." + error; + } + } + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + if (message.stats != null && message.hasOwnProperty("stats")) { + var error = $root.google.cloud.bigquery.storage.v1.StreamStats.verify(message.stats); + if (error) + return "stats." + error; + } + if (message.throttleState != null && message.hasOwnProperty("throttleState")) { + var error = $root.google.cloud.bigquery.storage.v1.ThrottleState.verify(message.throttleState); + if (error) + return "throttleState." + error; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." 
+ error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + return null; + }; + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadRowsResponse} ReadRowsResponse + */ + ReadRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); + if (object.avroRows != null) { + if (typeof object.avroRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroRows: object expected"); + message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.fromObject(object.avroRows); + } + if (object.arrowRecordBatch != null) { + if (typeof object.arrowRecordBatch !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowRecordBatch: object expected"); + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); + } + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, 
object.rowCount.high >>> 0).toNumber(); + if (object.stats != null) { + if (typeof object.stats !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.stats: object expected"); + message.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.fromObject(object.stats); + } + if (object.throttleState != null) { + if (typeof object.throttleState !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.throttleState: object expected"); + message.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.fromObject(object.throttleState); + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ReadRowsResponse} message ReadRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.stats = null; + object.throttleState = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.stats != null && message.hasOwnProperty("stats")) + object.stats = $root.google.cloud.bigquery.storage.v1.StreamStats.toObject(message.stats, options); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + object.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.toObject(message.avroRows, options); + if (options.oneofs) + object.rows = "avroRows"; + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); + if (options.oneofs) + object.rows = "arrowRecordBatch"; + } + if (message.throttleState != null && message.hasOwnProperty("throttleState")) + object.throttleState = $root.google.cloud.bigquery.storage.v1.ThrottleState.toObject(message.throttleState, options); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + return object; + }; + + /** + * Converts this ReadRowsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + * @returns {Object.} JSON object + */ + ReadRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadRowsResponse"; + }; + + return ReadRowsResponse; + })(); + + v1.SplitReadStreamRequest = (function() { + + /** + * Properties of a SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ISplitReadStreamRequest + * @property {string|null} [name] SplitReadStreamRequest name + * @property {number|null} [fraction] SplitReadStreamRequest fraction + */ + + /** + * Constructs a new SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a SplitReadStreamRequest. + * @implements ISplitReadStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set + */ + function SplitReadStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamRequest name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.name = ""; + + /** + * SplitReadStreamRequest fraction. 
+ * @member {number} fraction + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.fraction = 0; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest instance + */ + SplitReadStreamRequest.create = function create(properties) { + return new SplitReadStreamRequest(properties); + }; + + /** + * Encodes the specified SplitReadStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) + writer.uint32(/* id 2, wireType 1 =*/17).double(message.fraction); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.fraction = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.fraction != null && message.hasOwnProperty("fraction")) + if (typeof message.fraction !== "number") + return "fraction: number expected"; + return null; + }; + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} SplitReadStreamRequest + */ + SplitReadStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); + if (object.name != null) + message.name = String(object.name); + if (object.fraction != null) + message.fraction = Number(object.fraction); + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.fraction = 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.fraction != null && message.hasOwnProperty("fraction")) + object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + return object; + }; + + /** + * Converts this SplitReadStreamRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamRequest"; + }; + + return SplitReadStreamRequest; + })(); + + v1.SplitReadStreamResponse = (function() { + + /** + * Properties of a SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ISplitReadStreamResponse + * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [primaryStream] SplitReadStreamResponse primaryStream + * @property {google.cloud.bigquery.storage.v1.IReadStream|null} [remainderStream] SplitReadStreamResponse remainderStream + */ + + /** + * Constructs a new SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a SplitReadStreamResponse. + * @implements ISplitReadStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set + */ + function SplitReadStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamResponse primaryStream. 
+ * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} primaryStream + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.primaryStream = null; + + /** + * SplitReadStreamResponse remainderStream. + * @member {google.cloud.bigquery.storage.v1.IReadStream|null|undefined} remainderStream + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.remainderStream = null; + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse instance + */ + SplitReadStreamResponse.create = function create(properties) { + return new SplitReadStreamResponse(properties); + }; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.primaryStream); + if (error) + return "primaryStream." + error; + } + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.remainderStream); + if (error) + return "remainderStream." + error; + } + return null; + }; + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} SplitReadStreamResponse + */ + SplitReadStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); + if (object.primaryStream != null) { + if (typeof object.primaryStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.primaryStream: object expected"); + message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.primaryStream); + } + if (object.remainderStream != null) { + if (typeof object.remainderStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.SplitReadStreamResponse.remainderStream: object expected"); + message.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.remainderStream); + } + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.primaryStream = null; + object.remainderStream = null; + } + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + object.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.primaryStream, options); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + object.remainderStream = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.remainderStream, options); + return object; + }; + + /** + * Converts this SplitReadStreamResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.SplitReadStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.SplitReadStreamResponse"; + }; + + return SplitReadStreamResponse; + })(); + + v1.CreateWriteStreamRequest = (function() { + + /** + * Properties of a CreateWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ICreateWriteStreamRequest + * @property {string|null} [parent] CreateWriteStreamRequest parent + * @property {google.cloud.bigquery.storage.v1.IWriteStream|null} [writeStream] CreateWriteStreamRequest writeStream + */ + + /** + * Constructs a new CreateWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a CreateWriteStreamRequest. + * @implements ICreateWriteStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set + */ + function CreateWriteStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateWriteStreamRequest parent. 
+ * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @instance + */ + CreateWriteStreamRequest.prototype.parent = ""; + + /** + * CreateWriteStreamRequest writeStream. + * @member {google.cloud.bigquery.storage.v1.IWriteStream|null|undefined} writeStream + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @instance + */ + CreateWriteStreamRequest.prototype.writeStream = null; + + /** + * Creates a new CreateWriteStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest instance + */ + CreateWriteStreamRequest.create = function create(properties) { + return new CreateWriteStreamRequest(properties); + }; + + /** + * Encodes the specified CreateWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateWriteStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + $root.google.cloud.bigquery.storage.v1.WriteStream.encode(message.writeStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified CreateWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest} message CreateWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateWriteStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateWriteStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateWriteStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateWriteStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) { + var error = $root.google.cloud.bigquery.storage.v1.WriteStream.verify(message.writeStream); + if (error) + return "writeStream." + error; + } + return null; + }; + + /** + * Creates a CreateWriteStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} CreateWriteStreamRequest + */ + CreateWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.writeStream != null) { + if (typeof object.writeStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.CreateWriteStreamRequest.writeStream: object expected"); + message.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.fromObject(object.writeStream); + } + return message; + }; + + /** + * Creates a plain object from a CreateWriteStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.CreateWriteStreamRequest} message CreateWriteStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateWriteStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.writeStream = null; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = $root.google.cloud.bigquery.storage.v1.WriteStream.toObject(message.writeStream, options); + return object; + }; + + /** + * Converts this CreateWriteStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @instance + * @returns {Object.} JSON object + */ + CreateWriteStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CreateWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.CreateWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.CreateWriteStreamRequest"; + }; + + return CreateWriteStreamRequest; + })(); + + v1.AppendRowsRequest = (function() { + + /** + * Properties of an AppendRowsRequest. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IAppendRowsRequest + * @property {string|null} [writeStream] AppendRowsRequest writeStream + * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset + * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows + * @property {string|null} [traceId] AppendRowsRequest traceId + */ + + /** + * Constructs a new AppendRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AppendRowsRequest. + * @implements IAppendRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set + */ + function AppendRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AppendRowsRequest writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.writeStream = ""; + + /** + * AppendRowsRequest offset. + * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.offset = null; + + /** + * AppendRowsRequest protoRows. + * @member {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null|undefined} protoRows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.protoRows = null; + + /** + * AppendRowsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.traceId = ""; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * AppendRowsRequest rows. 
+ * @member {"protoRows"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + Object.defineProperty(AppendRowsRequest.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["protoRows"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new AppendRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest instance + */ + AppendRowsRequest.create = function create(properties) { + return new AppendRowsRequest(properties); + }; + + /** + * Encodes the specified AppendRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.protoRows != null && Object.hasOwnProperty.call(message, "protoRows")) + $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.traceId != null 
&& Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified AppendRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest} message AppendRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AppendRowsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.writeStream = reader.string(); + break; + } + case 2: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } + case 4: { + message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); + break; + } + case 6: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AppendRowsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AppendRowsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AppendRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); + if (error) + return "offset." + error; + } + if (message.protoRows != null && message.hasOwnProperty("protoRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify(message.protoRows); + if (error) + return "protoRows." + error; + } + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates an AppendRowsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest} AppendRowsRequest + */ + AppendRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); + if (object.writeStream != null) + message.writeStream = String(object.writeStream); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.offset: object expected"); + message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); + } + if (object.protoRows != null) { + if (typeof object.protoRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.protoRows: object expected"); + message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.fromObject(object.protoRows); + } + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from an AppendRowsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest} message AppendRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AppendRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.writeStream = ""; + object.offset = null; + object.traceId = ""; + } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); + if (message.protoRows != null && message.hasOwnProperty("protoRows")) { + object.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.toObject(message.protoRows, options); + if (options.oneofs) + object.rows = "protoRows"; + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this AppendRowsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + * @returns {Object.} JSON object + */ + AppendRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AppendRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest"; + }; + + AppendRowsRequest.ProtoData = (function() { + + /** + * Properties of a ProtoData. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @interface IProtoData + * @property {google.cloud.bigquery.storage.v1.IProtoSchema|null} [writerSchema] ProtoData writerSchema + * @property {google.cloud.bigquery.storage.v1.IProtoRows|null} [rows] ProtoData rows + */ + + /** + * Constructs a new ProtoData. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @classdesc Represents a ProtoData. + * @implements IProtoData + * @constructor + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set + */ + function ProtoData(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ProtoData writerSchema. + * @member {google.cloud.bigquery.storage.v1.IProtoSchema|null|undefined} writerSchema + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @instance + */ + ProtoData.prototype.writerSchema = null; + + /** + * ProtoData rows. 
+ * @member {google.cloud.bigquery.storage.v1.IProtoRows|null|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @instance + */ + ProtoData.prototype.rows = null; + + /** + * Creates a new ProtoData instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData instance + */ + ProtoData.create = function create(properties) { + return new ProtoData(properties); + }; + + /** + * Encodes the specified ProtoData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoData.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.writerSchema != null && Object.hasOwnProperty.call(message, "writerSchema")) + $root.google.cloud.bigquery.storage.v1.ProtoSchema.encode(message.writerSchema, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.rows != null && Object.hasOwnProperty.call(message, "rows")) + $root.google.cloud.bigquery.storage.v1.ProtoRows.encode(message.rows, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ProtoData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData} message ProtoData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ProtoData.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ProtoData message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoData.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); + break; + } + case 2: { + message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ProtoData message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ProtoData.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ProtoData message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ProtoData.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.ProtoSchema.verify(message.writerSchema); + if (error) + return "writerSchema." + error; + } + if (message.rows != null && message.hasOwnProperty("rows")) { + var error = $root.google.cloud.bigquery.storage.v1.ProtoRows.verify(message.rows); + if (error) + return "rows." + error; + } + return null; + }; + + /** + * Creates a ProtoData message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} ProtoData + */ + ProtoData.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); + if (object.writerSchema != null) { + if (typeof object.writerSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.writerSchema: object expected"); + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.fromObject(object.writerSchema); + } + if (object.rows != null) { + if (typeof object.rows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.rows: object expected"); + message.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.fromObject(object.rows); + } + return message; + }; + + /** + * Creates a plain object from a ProtoData message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData} message ProtoData + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ProtoData.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.writerSchema = null; + object.rows = null; + } + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) + object.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.toObject(message.writerSchema, options); + if (message.rows != null && message.hasOwnProperty("rows")) + object.rows = $root.google.cloud.bigquery.storage.v1.ProtoRows.toObject(message.rows, options); + return object; + }; + + /** + * Converts this ProtoData to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @instance + * @returns {Object.} JSON object + */ + ProtoData.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ProtoData + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ProtoData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"; + }; + + return ProtoData; + })(); + + return AppendRowsRequest; + })(); + + v1.AppendRowsResponse = (function() { + + /** + * Properties of an AppendRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IAppendRowsResponse + * @property {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null} [appendResult] AppendRowsResponse appendResult + * @property {google.rpc.IStatus|null} [error] AppendRowsResponse error + * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [updatedSchema] AppendRowsResponse updatedSchema + * @property {Array.|null} [rowErrors] AppendRowsResponse rowErrors + * @property {string|null} [writeStream] AppendRowsResponse writeStream + */ + + /** + * Constructs a new AppendRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents an AppendRowsResponse. + * @implements IAppendRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set + */ + function AppendRowsResponse(properties) { + this.rowErrors = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AppendRowsResponse appendResult. + * @member {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult|null|undefined} appendResult + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.appendResult = null; + + /** + * AppendRowsResponse error. + * @member {google.rpc.IStatus|null|undefined} error + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.error = null; + + /** + * AppendRowsResponse updatedSchema. + * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} updatedSchema + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.updatedSchema = null; + + /** + * AppendRowsResponse rowErrors. 
+ * @member {Array.} rowErrors + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.rowErrors = $util.emptyArray; + + /** + * AppendRowsResponse writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + AppendRowsResponse.prototype.writeStream = ""; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * AppendRowsResponse response. + * @member {"appendResult"|"error"|undefined} response + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + */ + Object.defineProperty(AppendRowsResponse.prototype, "response", { + get: $util.oneOfGetter($oneOfFields = ["appendResult", "error"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new AppendRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse instance + */ + AppendRowsResponse.create = function create(properties) { + return new AppendRowsResponse(properties); + }; + + /** + * Encodes the specified AppendRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.appendResult != null && Object.hasOwnProperty.call(message, "appendResult")) + $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.encode(message.appendResult, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.error != null && Object.hasOwnProperty.call(message, "error")) + $root.google.rpc.Status.encode(message.error, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.updatedSchema != null && Object.hasOwnProperty.call(message, "updatedSchema")) + $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.updatedSchema, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.rowErrors != null && message.rowErrors.length) + for (var i = 0; i < message.rowErrors.length; ++i) + $root.google.cloud.bigquery.storage.v1.RowError.encode(message.rowErrors[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.writeStream); + return writer; + }; + + /** + * Encodes the specified AppendRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IAppendRowsResponse} message AppendRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); + break; + } + case 2: { + message.error = $root.google.rpc.Status.decode(reader, reader.uint32()); + break; + } + case 3: { + message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); + break; + } + case 4: { + if (!(message.rowErrors && message.rowErrors.length)) + message.rowErrors = []; + message.rowErrors.push($root.google.cloud.bigquery.storage.v1.RowError.decode(reader, reader.uint32())); + break; + } + case 5: { + message.writeStream = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AppendRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AppendRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AppendRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.appendResult != null && message.hasOwnProperty("appendResult")) { + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify(message.appendResult); + if (error) + return "appendResult." + error; + } + } + if (message.error != null && message.hasOwnProperty("error")) { + if (properties.response === 1) + return "response: multiple values"; + properties.response = 1; + { + var error = $root.google.rpc.Status.verify(message.error); + if (error) + return "error." + error; + } + } + if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.updatedSchema); + if (error) + return "updatedSchema." + error; + } + if (message.rowErrors != null && message.hasOwnProperty("rowErrors")) { + if (!Array.isArray(message.rowErrors)) + return "rowErrors: array expected"; + for (var i = 0; i < message.rowErrors.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.RowError.verify(message.rowErrors[i]); + if (error) + return "rowErrors." + error; + } + } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; + return null; + }; + + /** + * Creates an AppendRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse} AppendRowsResponse + */ + AppendRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); + if (object.appendResult != null) { + if (typeof object.appendResult !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.appendResult: object expected"); + message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.fromObject(object.appendResult); + } + if (object.error != null) { + if (typeof object.error !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.error: object expected"); + message.error = $root.google.rpc.Status.fromObject(object.error); + } + if (object.updatedSchema != null) { + if (typeof object.updatedSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.updatedSchema: object expected"); + message.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.updatedSchema); + } + if (object.rowErrors) { + if (!Array.isArray(object.rowErrors)) + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: array expected"); + message.rowErrors = []; + for (var i = 0; i < object.rowErrors.length; ++i) { + if (typeof object.rowErrors[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.rowErrors: object expected"); + message.rowErrors[i] = $root.google.cloud.bigquery.storage.v1.RowError.fromObject(object.rowErrors[i]); + } + } + if (object.writeStream != null) + message.writeStream = String(object.writeStream); + return message; + }; + + /** + * Creates a plain 
object from an AppendRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse} message AppendRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AppendRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.rowErrors = []; + if (options.defaults) { + object.updatedSchema = null; + object.writeStream = ""; + } + if (message.appendResult != null && message.hasOwnProperty("appendResult")) { + object.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.toObject(message.appendResult, options); + if (options.oneofs) + object.response = "appendResult"; + } + if (message.error != null && message.hasOwnProperty("error")) { + object.error = $root.google.rpc.Status.toObject(message.error, options); + if (options.oneofs) + object.response = "error"; + } + if (message.updatedSchema != null && message.hasOwnProperty("updatedSchema")) + object.updatedSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.updatedSchema, options); + if (message.rowErrors && message.rowErrors.length) { + object.rowErrors = []; + for (var j = 0; j < message.rowErrors.length; ++j) + object.rowErrors[j] = $root.google.cloud.bigquery.storage.v1.RowError.toObject(message.rowErrors[j], options); + } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; + return object; + }; + + /** + * Converts this AppendRowsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @instance + * @returns {Object.} JSON object + */ + AppendRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AppendRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse"; + }; + + AppendRowsResponse.AppendResult = (function() { + + /** + * Properties of an AppendResult. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @interface IAppendResult + * @property {google.protobuf.IInt64Value|null} [offset] AppendResult offset + */ + + /** + * Constructs a new AppendResult. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse + * @classdesc Represents an AppendResult. + * @implements IAppendResult + * @constructor + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set + */ + function AppendResult(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AppendResult offset. + * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @instance + */ + AppendResult.prototype.offset = null; + + /** + * Creates a new AppendResult instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult instance + */ + AppendResult.create = function create(properties) { + return new AppendResult(properties); + }; + + /** + * Encodes the specified AppendResult message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendResult.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified AppendResult message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.IAppendResult} message AppendResult message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AppendResult.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AppendResult message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendResult.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AppendResult message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AppendResult.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AppendResult message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AppendResult.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); + if (error) + return "offset." + error; + } + return null; + }; + + /** + * Creates an AppendResult message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} AppendResult + */ + AppendResult.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.offset: object expected"); + message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); + } + return message; + }; + + /** + * Creates a plain object from an AppendResult message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult} message AppendResult + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AppendResult.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.offset = null; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); + return object; + }; + + /** + * Converts this AppendResult to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @instance + * @returns {Object.} JSON object + */ + AppendResult.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AppendResult + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AppendResult.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"; + }; + + return AppendResult; + })(); + + return AppendRowsResponse; + })(); + + v1.GetWriteStreamRequest = (function() { + + /** + * Properties of a GetWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IGetWriteStreamRequest + * @property {string|null} [name] GetWriteStreamRequest name + * @property {google.cloud.bigquery.storage.v1.WriteStreamView|null} [view] GetWriteStreamRequest view + */ + + /** + * Constructs a new GetWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a GetWriteStreamRequest. + * @implements IGetWriteStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set + */ + function GetWriteStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetWriteStreamRequest name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @instance + */ + GetWriteStreamRequest.prototype.name = ""; + + /** + * GetWriteStreamRequest view. + * @member {google.cloud.bigquery.storage.v1.WriteStreamView} view + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @instance + */ + GetWriteStreamRequest.prototype.view = 0; + + /** + * Creates a new GetWriteStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest instance + */ + GetWriteStreamRequest.create = function create(properties) { + return new GetWriteStreamRequest(properties); + }; + + /** + * Encodes the specified GetWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetWriteStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.view != null && Object.hasOwnProperty.call(message, "view")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.view); + return writer; + }; + + /** + * Encodes the specified GetWriteStreamRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1.GetWriteStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IGetWriteStreamRequest} message GetWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a GetWriteStreamRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetWriteStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 3: { + message.view = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a GetWriteStreamRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a GetWriteStreamRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetWriteStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.view != null && message.hasOwnProperty("view")) + switch (message.view) { + default: + return "view: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates a GetWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} GetWriteStreamRequest + */ + GetWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); + if (object.name != null) + message.name = String(object.name); + switch (object.view) { + default: + if (typeof object.view === "number") { + message.view = object.view; + break; + } + break; + case "WRITE_STREAM_VIEW_UNSPECIFIED": + case 0: + message.view = 0; + break; + case "BASIC": + case 1: + message.view = 1; + break; + case "FULL": + case 2: + message.view = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from a GetWriteStreamRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.GetWriteStreamRequest} message GetWriteStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + GetWriteStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.view = options.enums === String ? "WRITE_STREAM_VIEW_UNSPECIFIED" : 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.view != null && message.hasOwnProperty("view")) + object.view = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] === undefined ? 
message.view : $root.google.cloud.bigquery.storage.v1.WriteStreamView[message.view] : message.view; + return object; + }; + + /** + * Converts this GetWriteStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @instance + * @returns {Object.} JSON object + */ + GetWriteStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for GetWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.GetWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + GetWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.GetWriteStreamRequest"; + }; + + return GetWriteStreamRequest; + })(); + + v1.BatchCommitWriteStreamsRequest = (function() { + + /** + * Properties of a BatchCommitWriteStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IBatchCommitWriteStreamsRequest + * @property {string|null} [parent] BatchCommitWriteStreamsRequest parent + * @property {Array.|null} [writeStreams] BatchCommitWriteStreamsRequest writeStreams + */ + + /** + * Constructs a new BatchCommitWriteStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BatchCommitWriteStreamsRequest. 
+ * @implements IBatchCommitWriteStreamsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set + */ + function BatchCommitWriteStreamsRequest(properties) { + this.writeStreams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCommitWriteStreamsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @instance + */ + BatchCommitWriteStreamsRequest.prototype.parent = ""; + + /** + * BatchCommitWriteStreamsRequest writeStreams. + * @member {Array.} writeStreams + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @instance + */ + BatchCommitWriteStreamsRequest.prototype.writeStreams = $util.emptyArray; + + /** + * Creates a new BatchCommitWriteStreamsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest instance + */ + BatchCommitWriteStreamsRequest.create = function create(properties) { + return new BatchCommitWriteStreamsRequest(properties); + }; + + /** + * Encodes the specified BatchCommitWriteStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCommitWriteStreamsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.writeStreams != null && message.writeStreams.length) + for (var i = 0; i < message.writeStreams.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.writeStreams[i]); + return writer; + }; + + /** + * Encodes the specified BatchCommitWriteStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCommitWriteStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCommitWriteStreamsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.writeStreams && message.writeStreams.length)) + message.writeStreams = []; + message.writeStreams.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCommitWriteStreamsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCommitWriteStreamsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCommitWriteStreamsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCommitWriteStreamsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.writeStreams != null && message.hasOwnProperty("writeStreams")) { + if (!Array.isArray(message.writeStreams)) + return "writeStreams: array expected"; + for (var i = 0; i < message.writeStreams.length; ++i) + if (!$util.isString(message.writeStreams[i])) + return "writeStreams: string[] expected"; + } + return null; + }; + + /** + * Creates a BatchCommitWriteStreamsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} BatchCommitWriteStreamsRequest + */ + BatchCommitWriteStreamsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.writeStreams) { + if (!Array.isArray(object.writeStreams)) + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest.writeStreams: array expected"); + message.writeStreams = []; + for (var i = 0; i < object.writeStreams.length; ++i) + message.writeStreams[i] = String(object.writeStreams[i]); + } + return message; + }; + + /** + * Creates a plain object from a BatchCommitWriteStreamsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest} message BatchCommitWriteStreamsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCommitWriteStreamsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.writeStreams = []; + if (options.defaults) + object.parent = ""; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.writeStreams && message.writeStreams.length) { + object.writeStreams = []; + for (var j = 0; j < message.writeStreams.length; ++j) + object.writeStreams[j] = message.writeStreams[j]; + } + return object; + }; + + /** + * Converts this BatchCommitWriteStreamsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCommitWriteStreamsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCommitWriteStreamsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCommitWriteStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest"; + }; + + return BatchCommitWriteStreamsRequest; + })(); + + v1.BatchCommitWriteStreamsResponse = (function() { + + /** + * 
Properties of a BatchCommitWriteStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IBatchCommitWriteStreamsResponse + * @property {google.protobuf.ITimestamp|null} [commitTime] BatchCommitWriteStreamsResponse commitTime + * @property {Array.|null} [streamErrors] BatchCommitWriteStreamsResponse streamErrors + */ + + /** + * Constructs a new BatchCommitWriteStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a BatchCommitWriteStreamsResponse. + * @implements IBatchCommitWriteStreamsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set + */ + function BatchCommitWriteStreamsResponse(properties) { + this.streamErrors = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCommitWriteStreamsResponse commitTime. + * @member {google.protobuf.ITimestamp|null|undefined} commitTime + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @instance + */ + BatchCommitWriteStreamsResponse.prototype.commitTime = null; + + /** + * BatchCommitWriteStreamsResponse streamErrors. + * @member {Array.} streamErrors + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @instance + */ + BatchCommitWriteStreamsResponse.prototype.streamErrors = $util.emptyArray; + + /** + * Creates a new BatchCommitWriteStreamsResponse instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse instance + */ + BatchCommitWriteStreamsResponse.create = function create(properties) { + return new BatchCommitWriteStreamsResponse(properties); + }; + + /** + * Encodes the specified BatchCommitWriteStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCommitWriteStreamsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) + $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.streamErrors != null && message.streamErrors.length) + for (var i = 0; i < message.streamErrors.length; ++i) + $root.google.cloud.bigquery.storage.v1.StorageError.encode(message.streamErrors[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCommitWriteStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCommitWriteStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCommitWriteStreamsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + if (!(message.streamErrors && message.streamErrors.length)) + message.streamErrors = []; + message.streamErrors.push($root.google.cloud.bigquery.storage.v1.StorageError.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCommitWriteStreamsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCommitWriteStreamsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCommitWriteStreamsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCommitWriteStreamsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.commitTime != null && message.hasOwnProperty("commitTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTime); + if (error) + return "commitTime." + error; + } + if (message.streamErrors != null && message.hasOwnProperty("streamErrors")) { + if (!Array.isArray(message.streamErrors)) + return "streamErrors: array expected"; + for (var i = 0; i < message.streamErrors.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.StorageError.verify(message.streamErrors[i]); + if (error) + return "streamErrors." + error; + } + } + return null; + }; + + /** + * Creates a BatchCommitWriteStreamsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} BatchCommitWriteStreamsResponse + */ + BatchCommitWriteStreamsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); + if (object.commitTime != null) { + if (typeof object.commitTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.commitTime: object expected"); + message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime); + } + if (object.streamErrors) { + if (!Array.isArray(object.streamErrors)) + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: array expected"); + message.streamErrors = []; + for (var i = 0; i < object.streamErrors.length; ++i) { + if (typeof object.streamErrors[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse.streamErrors: object expected"); + message.streamErrors[i] = $root.google.cloud.bigquery.storage.v1.StorageError.fromObject(object.streamErrors[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCommitWriteStreamsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse} message BatchCommitWriteStreamsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCommitWriteStreamsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streamErrors = []; + if (options.defaults) + object.commitTime = null; + if (message.commitTime != null && message.hasOwnProperty("commitTime")) + object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); + if (message.streamErrors && message.streamErrors.length) { + object.streamErrors = []; + for (var j = 0; j < message.streamErrors.length; ++j) + object.streamErrors[j] = $root.google.cloud.bigquery.storage.v1.StorageError.toObject(message.streamErrors[j], options); + } + return object; + }; + + /** + * Converts this BatchCommitWriteStreamsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCommitWriteStreamsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCommitWriteStreamsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCommitWriteStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse"; + }; + + return BatchCommitWriteStreamsResponse; + })(); + + v1.FinalizeWriteStreamRequest = (function() { + + /** + * Properties of a FinalizeWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFinalizeWriteStreamRequest + * @property {string|null} [name] FinalizeWriteStreamRequest name + */ + + /** + * Constructs a new FinalizeWriteStreamRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FinalizeWriteStreamRequest. + * @implements IFinalizeWriteStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set + */ + function FinalizeWriteStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FinalizeWriteStreamRequest name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @instance + */ + FinalizeWriteStreamRequest.prototype.name = ""; + + /** + * Creates a new FinalizeWriteStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest instance + */ + FinalizeWriteStreamRequest.create = function create(properties) { + return new FinalizeWriteStreamRequest(properties); + }; + + /** + * Encodes the specified FinalizeWriteStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeWriteStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified FinalizeWriteStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest} message FinalizeWriteStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeWriteStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeWriteStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FinalizeWriteStreamRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeWriteStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FinalizeWriteStreamRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FinalizeWriteStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a FinalizeWriteStreamRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} FinalizeWriteStreamRequest + */ + FinalizeWriteStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a FinalizeWriteStreamRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest} message FinalizeWriteStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FinalizeWriteStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this FinalizeWriteStreamRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @instance + * @returns {Object.} JSON object + */ + FinalizeWriteStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FinalizeWriteStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FinalizeWriteStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest"; + }; + + return FinalizeWriteStreamRequest; + })(); + + v1.FinalizeWriteStreamResponse = (function() { + + /** + * Properties of a FinalizeWriteStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFinalizeWriteStreamResponse + * @property {number|Long|null} [rowCount] FinalizeWriteStreamResponse rowCount + */ + + /** + * Constructs a new FinalizeWriteStreamResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FinalizeWriteStreamResponse. + * @implements IFinalizeWriteStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set + */ + function FinalizeWriteStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FinalizeWriteStreamResponse rowCount. 
+ * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @instance + */ + FinalizeWriteStreamResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new FinalizeWriteStreamResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse instance + */ + FinalizeWriteStreamResponse.create = function create(properties) { + return new FinalizeWriteStreamResponse(properties); + }; + + /** + * Encodes the specified FinalizeWriteStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeWriteStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified FinalizeWriteStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse} message FinalizeWriteStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeWriteStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeWriteStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.rowCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FinalizeWriteStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeWriteStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FinalizeWriteStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FinalizeWriteStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates a FinalizeWriteStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} FinalizeWriteStreamResponse + */ + FinalizeWriteStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a FinalizeWriteStreamResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse} message FinalizeWriteStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FinalizeWriteStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? 
"0" : 0; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this FinalizeWriteStreamResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @instance + * @returns {Object.} JSON object + */ + FinalizeWriteStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FinalizeWriteStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FinalizeWriteStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse"; + }; + + return FinalizeWriteStreamResponse; + })(); + + v1.FlushRowsRequest = (function() { + + /** + * Properties of a FlushRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFlushRowsRequest + * @property {string|null} [writeStream] FlushRowsRequest writeStream + * @property {google.protobuf.IInt64Value|null} [offset] FlushRowsRequest offset + */ + + /** + * Constructs a new FlushRowsRequest. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FlushRowsRequest. 
+ * @implements IFlushRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set + */ + function FlushRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FlushRowsRequest writeStream. + * @member {string} writeStream + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @instance + */ + FlushRowsRequest.prototype.writeStream = ""; + + /** + * FlushRowsRequest offset. + * @member {google.protobuf.IInt64Value|null|undefined} offset + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @instance + */ + FlushRowsRequest.prototype.offset = null; + + /** + * Creates a new FlushRowsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest instance + */ + FlushRowsRequest.create = function create(properties) { + return new FlushRowsRequest(properties); + }; + + /** + * Encodes the specified FlushRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlushRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.writeStream != null && Object.hasOwnProperty.call(message, "writeStream")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.writeStream); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FlushRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsRequest} message FlushRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlushRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FlushRowsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlushRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.writeStream = reader.string(); + break; + } + case 2: { + message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FlushRowsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlushRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FlushRowsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FlushRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + if (!$util.isString(message.writeStream)) + return "writeStream: string expected"; + if (message.offset != null && message.hasOwnProperty("offset")) { + var error = $root.google.protobuf.Int64Value.verify(message.offset); + if (error) + return "offset." + error; + } + return null; + }; + + /** + * Creates a FlushRowsRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.FlushRowsRequest} FlushRowsRequest + */ + FlushRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); + if (object.writeStream != null) + message.writeStream = String(object.writeStream); + if (object.offset != null) { + if (typeof object.offset !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.FlushRowsRequest.offset: object expected"); + message.offset = $root.google.protobuf.Int64Value.fromObject(object.offset); + } + return message; + }; + + /** + * Creates a plain object from a FlushRowsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1.FlushRowsRequest} message FlushRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FlushRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.writeStream = ""; + object.offset = null; + } + if (message.writeStream != null && message.hasOwnProperty("writeStream")) + object.writeStream = message.writeStream; + if (message.offset != null && message.hasOwnProperty("offset")) + object.offset = $root.google.protobuf.Int64Value.toObject(message.offset, options); + return object; + }; + + /** + * Converts this FlushRowsRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @instance + * @returns {Object.} JSON object + */ + FlushRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FlushRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FlushRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FlushRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsRequest"; + }; + + return FlushRowsRequest; + })(); + + v1.FlushRowsResponse = (function() { + + /** + * Properties of a FlushRowsResponse. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IFlushRowsResponse + * @property {number|Long|null} [offset] FlushRowsResponse offset + */ + + /** + * Constructs a new FlushRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a FlushRowsResponse. + * @implements IFlushRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set + */ + function FlushRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FlushRowsResponse offset. + * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @instance + */ + FlushRowsResponse.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new FlushRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse instance + */ + FlushRowsResponse.create = function create(properties) { + return new FlushRowsResponse(properties); + }; + + /** + * Encodes the specified FlushRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlushRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.offset); + return writer; + }; + + /** + * Encodes the specified FlushRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.FlushRowsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.IFlushRowsResponse} message FlushRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlushRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FlushRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlushRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.offset = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FlushRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlushRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FlushRowsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FlushRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; + + /** + * Creates a FlushRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.FlushRowsResponse} FlushRowsResponse + */ + FlushRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.FlushRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a FlushRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1.FlushRowsResponse} message FlushRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FlushRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? 
$util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; + + /** + * Converts this FlushRowsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @instance + * @returns {Object.} JSON object + */ + FlushRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FlushRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.FlushRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FlushRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.FlushRowsResponse"; + }; + + return FlushRowsResponse; + })(); + + v1.StorageError = (function() { + + /** + * Properties of a StorageError. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IStorageError + * @property {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode|null} [code] StorageError code + * @property {string|null} [entity] StorageError entity + * @property {string|null} [errorMessage] StorageError errorMessage + */ + + /** + * Constructs a new StorageError. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a StorageError. 
+ * @implements IStorageError + * @constructor + * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set + */ + function StorageError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StorageError code. + * @member {google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode} code + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @instance + */ + StorageError.prototype.code = 0; + + /** + * StorageError entity. + * @member {string} entity + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @instance + */ + StorageError.prototype.entity = ""; + + /** + * StorageError errorMessage. + * @member {string} errorMessage + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @instance + */ + StorageError.prototype.errorMessage = ""; + + /** + * Creates a new StorageError instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {google.cloud.bigquery.storage.v1.IStorageError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError instance + */ + StorageError.create = function create(properties) { + return new StorageError(properties); + }; + + /** + * Encodes the specified StorageError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); + if (message.entity != null && Object.hasOwnProperty.call(message, "entity")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.entity); + if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.errorMessage); + return writer; + }; + + /** + * Encodes the specified StorageError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.StorageError.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {google.cloud.bigquery.storage.v1.IStorageError} message StorageError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageError.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StorageError message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageError.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StorageError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.code = reader.int32(); + break; + } + case 2: { + message.entity = reader.string(); + break; + } + case 3: { + message.errorMessage = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StorageError message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageError.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StorageError message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StorageError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.code != null && message.hasOwnProperty("code")) + switch (message.code) { + default: + return "code: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + break; + } + if (message.entity != null && message.hasOwnProperty("entity")) + if (!$util.isString(message.entity)) + return "entity: string expected"; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + if (!$util.isString(message.errorMessage)) + return "errorMessage: string expected"; + return null; + }; + + /** + * Creates a StorageError message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.StorageError} StorageError + */ + StorageError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.StorageError) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.StorageError(); + switch (object.code) { + default: + if (typeof object.code === "number") { + message.code = object.code; + break; + } + break; + case "STORAGE_ERROR_CODE_UNSPECIFIED": + case 0: + message.code = 0; + break; + case "TABLE_NOT_FOUND": + case 1: + message.code = 1; + break; + case "STREAM_ALREADY_COMMITTED": + case 2: + message.code = 2; + break; + case "STREAM_NOT_FOUND": + case 3: + message.code = 3; + break; + case "INVALID_STREAM_TYPE": + case 4: + message.code = 4; + break; + case "INVALID_STREAM_STATE": + case 5: + message.code = 5; + break; + case "STREAM_FINALIZED": + case 6: + message.code = 6; + break; + case "SCHEMA_MISMATCH_EXTRA_FIELDS": + case 7: + message.code = 7; + break; + case "OFFSET_ALREADY_EXISTS": + case 8: + message.code = 8; + break; + case "OFFSET_OUT_OF_RANGE": + case 9: + message.code = 9; + break; + } + if (object.entity != null) + message.entity = String(object.entity); + if (object.errorMessage != null) + message.errorMessage = String(object.errorMessage); + return message; + }; + + /** + * Creates a plain object from a StorageError message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {google.cloud.bigquery.storage.v1.StorageError} message StorageError + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StorageError.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.code = options.enums === String ? "STORAGE_ERROR_CODE_UNSPECIFIED" : 0; + object.entity = ""; + object.errorMessage = ""; + } + if (message.code != null && message.hasOwnProperty("code")) + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] === undefined ? message.code : $root.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode[message.code] : message.code; + if (message.entity != null && message.hasOwnProperty("entity")) + object.entity = message.entity; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + object.errorMessage = message.errorMessage; + return object; + }; + + /** + * Converts this StorageError to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @instance + * @returns {Object.} JSON object + */ + StorageError.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StorageError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.StorageError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StorageError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.StorageError"; + }; + + /** + * StorageErrorCode enum. 
+ * @name google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode + * @enum {number} + * @property {number} STORAGE_ERROR_CODE_UNSPECIFIED=0 STORAGE_ERROR_CODE_UNSPECIFIED value + * @property {number} TABLE_NOT_FOUND=1 TABLE_NOT_FOUND value + * @property {number} STREAM_ALREADY_COMMITTED=2 STREAM_ALREADY_COMMITTED value + * @property {number} STREAM_NOT_FOUND=3 STREAM_NOT_FOUND value + * @property {number} INVALID_STREAM_TYPE=4 INVALID_STREAM_TYPE value + * @property {number} INVALID_STREAM_STATE=5 INVALID_STREAM_STATE value + * @property {number} STREAM_FINALIZED=6 STREAM_FINALIZED value + * @property {number} SCHEMA_MISMATCH_EXTRA_FIELDS=7 SCHEMA_MISMATCH_EXTRA_FIELDS value + * @property {number} OFFSET_ALREADY_EXISTS=8 OFFSET_ALREADY_EXISTS value + * @property {number} OFFSET_OUT_OF_RANGE=9 OFFSET_OUT_OF_RANGE value + */ + StorageError.StorageErrorCode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STORAGE_ERROR_CODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "TABLE_NOT_FOUND"] = 1; + values[valuesById[2] = "STREAM_ALREADY_COMMITTED"] = 2; + values[valuesById[3] = "STREAM_NOT_FOUND"] = 3; + values[valuesById[4] = "INVALID_STREAM_TYPE"] = 4; + values[valuesById[5] = "INVALID_STREAM_STATE"] = 5; + values[valuesById[6] = "STREAM_FINALIZED"] = 6; + values[valuesById[7] = "SCHEMA_MISMATCH_EXTRA_FIELDS"] = 7; + values[valuesById[8] = "OFFSET_ALREADY_EXISTS"] = 8; + values[valuesById[9] = "OFFSET_OUT_OF_RANGE"] = 9; + return values; + })(); + + return StorageError; + })(); + + v1.RowError = (function() { + + /** + * Properties of a RowError. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IRowError + * @property {number|Long|null} [index] RowError index + * @property {google.cloud.bigquery.storage.v1.RowError.RowErrorCode|null} [code] RowError code + * @property {string|null} [message] RowError message + */ + + /** + * Constructs a new RowError. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a RowError. + * @implements IRowError + * @constructor + * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set + */ + function RowError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * RowError index. + * @member {number|Long} index + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.index = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * RowError code. + * @member {google.cloud.bigquery.storage.v1.RowError.RowErrorCode} code + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.code = 0; + + /** + * RowError message. + * @member {string} message + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + */ + RowError.prototype.message = ""; + + /** + * Creates a new RowError instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError instance + */ + RowError.create = function create(properties) { + return new RowError(properties); + }; + + /** + * Encodes the specified RowError message. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RowError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.index != null && Object.hasOwnProperty.call(message, "index")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.index); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.code); + if (message.message != null && Object.hasOwnProperty.call(message, "message")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.message); + return writer; + }; + + /** + * Encodes the specified RowError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.RowError.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.IRowError} message RowError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RowError.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a RowError message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RowError.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.RowError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.index = reader.int64(); + break; + } + case 2: { + message.code = reader.int32(); + break; + } + case 3: { + message.message = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a RowError message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RowError.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a RowError message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + RowError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.index != null && message.hasOwnProperty("index")) + if (!$util.isInteger(message.index) && !(message.index && $util.isInteger(message.index.low) && $util.isInteger(message.index.high))) + return "index: integer|Long expected"; + if (message.code != null && message.hasOwnProperty("code")) + switch (message.code) { + default: + return "code: enum value expected"; + case 0: + case 1: + break; + } + if (message.message != null && message.hasOwnProperty("message")) + if (!$util.isString(message.message)) + return "message: string expected"; + return null; + }; + + /** + * Creates a RowError message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.RowError} RowError + */ + RowError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.RowError) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.RowError(); + if (object.index != null) + if ($util.Long) + (message.index = $util.Long.fromValue(object.index)).unsigned = false; + else if (typeof object.index === "string") + message.index = parseInt(object.index, 10); + else if (typeof object.index === "number") + message.index = object.index; + else if (typeof object.index === "object") + message.index = new $util.LongBits(object.index.low >>> 0, object.index.high >>> 0).toNumber(); + switch (object.code) { + default: + if (typeof object.code === "number") { + message.code = object.code; + break; + } + break; + case "ROW_ERROR_CODE_UNSPECIFIED": + case 0: + message.code = 0; + break; + case "FIELDS_ERROR": + case 1: + message.code = 1; + break; + } + if (object.message != null) + message.message = String(object.message); + return message; + }; + + /** + * Creates a plain object from a RowError message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {google.cloud.bigquery.storage.v1.RowError} message RowError + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + RowError.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.index = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.index = options.longs === String ? 
"0" : 0; + object.code = options.enums === String ? "ROW_ERROR_CODE_UNSPECIFIED" : 0; + object.message = ""; + } + if (message.index != null && message.hasOwnProperty("index")) + if (typeof message.index === "number") + object.index = options.longs === String ? String(message.index) : message.index; + else + object.index = options.longs === String ? $util.Long.prototype.toString.call(message.index) : options.longs === Number ? new $util.LongBits(message.index.low >>> 0, message.index.high >>> 0).toNumber() : message.index; + if (message.code != null && message.hasOwnProperty("code")) + object.code = options.enums === String ? $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] === undefined ? message.code : $root.google.cloud.bigquery.storage.v1.RowError.RowErrorCode[message.code] : message.code; + if (message.message != null && message.hasOwnProperty("message")) + object.message = message.message; + return object; + }; + + /** + * Converts this RowError to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.RowError + * @instance + * @returns {Object.} JSON object + */ + RowError.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for RowError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.RowError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + RowError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.RowError"; + }; + + /** + * RowErrorCode enum. 
+ * @name google.cloud.bigquery.storage.v1.RowError.RowErrorCode + * @enum {number} + * @property {number} ROW_ERROR_CODE_UNSPECIFIED=0 ROW_ERROR_CODE_UNSPECIFIED value + * @property {number} FIELDS_ERROR=1 FIELDS_ERROR value + */ + RowError.RowErrorCode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ROW_ERROR_CODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "FIELDS_ERROR"] = 1; + return values; + })(); + + return RowError; + })(); + + /** + * DataFormat enum. + * @name google.cloud.bigquery.storage.v1.DataFormat + * @enum {number} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=2 ARROW value + */ + v1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[2] = "ARROW"] = 2; + return values; + })(); + + v1.ReadSession = (function() { + + /** + * Properties of a ReadSession. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1.DataFormat|null} [dataFormat] ReadSession dataFormat + * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {string|null} [table] ReadSession table + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions + * @property {Array.|null} [streams] ReadSession streams + * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned + * @property {string|null} [traceId] ReadSession traceId + */ + + /** + * Constructs a new ReadSession. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadSession. + * @implements IReadSession + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + */ + function ReadSession(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.name = ""; + + /** + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.expireTime = null; + + /** + * ReadSession dataFormat. 
+ * @member {google.cloud.bigquery.storage.v1.DataFormat} dataFormat + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.dataFormat = 0; + + /** + * ReadSession avroSchema. + * @member {google.cloud.bigquery.storage.v1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession table. + * @member {string} table + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.table = ""; + + /** + * ReadSession tableModifiers. + * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession readOptions. + * @member {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.readOptions = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + /** + * ReadSession estimatedTotalBytesScanned. + * @member {number|Long} estimatedTotalBytesScanned + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadSession traceId. 
+ * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.traceId = ""; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession instance + */ + ReadSession.create = function create(properties) { + return new ReadSession(properties); + }; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.dataFormat != null && Object.hasOwnProperty.call(message, "dataFormat")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.dataFormat); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.table != null && Object.hasOwnProperty.call(message, "table")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.table); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) + $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 8, wireType 2 
=*/66).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1.ReadStream.encode(message.streams[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.estimatedTotalBytesScanned != null && Object.hasOwnProperty.call(message, "estimatedTotalBytesScanned")) + writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 13, wireType 2 =*/106).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 3: { + message.dataFormat = reader.int32(); + break; + } + case 4: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 5: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + case 6: { + message.table = reader.string(); + break; + } + case 7: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.decode(reader, reader.uint32()); + break; + } + case 8: { + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.decode(reader, reader.uint32()); + break; + } + case 10: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32())); + break; + } + case 12: { + message.estimatedTotalBytesScanned = reader.int64(); + break; + } + case 13: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadSession message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadSession.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); + if (error) + return "expireTime." + error; + } + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + switch (message.dataFormat) { + default: + return "dataFormat: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." 
+ error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } + if (message.table != null && message.hasOwnProperty("table")) + if (!$util.isString(message.table)) + return "table: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.ReadStream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) + return "estimatedTotalBytesScanned: integer|Long expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession} ReadSession + */ + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.expireTime: object expected"); + message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); + } + switch (object.dataFormat) { + default: + if (typeof object.dataFormat === "number") { + message.dataFormat = object.dataFormat; + break; + } + break; + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.dataFormat = 0; + break; + case "AVRO": + case 1: + message.dataFormat = 1; + break; + case "ARROW": + case 2: + message.dataFormat = 2; + break; + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.table != null) + message.table = String(object.table); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = 
$root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.fromObject(object.tableModifiers); + } + if (object.readOptions != null) { + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.fromObject(object.readOptions); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1.ReadStream.fromObject(object.streams[i]); + } + } + if (object.estimatedTotalBytesScanned != null) + if ($util.Long) + (message.estimatedTotalBytesScanned = $util.Long.fromValue(object.estimatedTotalBytesScanned)).unsigned = false; + else if (typeof object.estimatedTotalBytesScanned === "string") + message.estimatedTotalBytesScanned = parseInt(object.estimatedTotalBytesScanned, 10); + else if (typeof object.estimatedTotalBytesScanned === "number") + message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; + else if (typeof object.estimatedTotalBytesScanned === "object") + message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession} message ReadSession + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadSession.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (options.defaults) { + object.name = ""; + object.expireTime = null; + object.dataFormat = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; + object.table = ""; + object.tableModifiers = null; + object.readOptions = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedTotalBytesScanned = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedTotalBytesScanned = options.longs === String ? "0" : 0; + object.traceId = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.dataFormat != null && message.hasOwnProperty("dataFormat")) + object.dataFormat = options.enums === String ? $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] === undefined ? 
message.dataFormat : $root.google.cloud.bigquery.storage.v1.DataFormat[message.dataFormat] : message.dataFormat; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.table != null && message.hasOwnProperty("table")) + object.table = message.table; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.toObject(message.tableModifiers, options); + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.toObject(message.readOptions, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1.ReadStream.toObject(message.streams[j], options); + } + if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) + if (typeof message.estimatedTotalBytesScanned === "number") + object.estimatedTotalBytesScanned = options.longs === String ? String(message.estimatedTotalBytesScanned) : message.estimatedTotalBytesScanned; + else + object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? 
new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this ReadSession to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + * @returns {Object.} JSON object + */ + ReadSession.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadSession + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession"; + }; + + ReadSession.TableModifiers = (function() { + + /** + * Properties of a TableModifiers. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ + + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableModifiers. + * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableModifiers snapshotTime. 
+ * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; + + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableModifiers message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; + + /** + * Converts this TableModifiers to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableModifiers + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableModifiers + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"; + }; + + return TableModifiers; + })(); + + ReadSession.TableReadOptions = (function() { + + /** + * Properties of a TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions + * @property {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null} [avroSerializationOptions] TableReadOptions avroSerializationOptions + */ + + /** + * Constructs a new TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @classdesc Represents a TableReadOptions. 
+ * @implements ITableReadOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + */ + function TableReadOptions(properties) { + this.selectedFields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReadOptions selectedFields. + * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.selectedFields = $util.emptyArray; + + /** + * TableReadOptions rowRestriction. + * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; + + /** + * TableReadOptions arrowSerializationOptions. + * @member {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null|undefined} arrowSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.arrowSerializationOptions = null; + + /** + * TableReadOptions avroSerializationOptions. + * @member {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null|undefined} avroSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.avroSerializationOptions = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * TableReadOptions outputFormatSerializationOptions. 
+ * @member {"arrowSerializationOptions"|"avroSerializationOptions"|undefined} outputFormatSerializationOptions + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + Object.defineProperty(TableReadOptions.prototype, "outputFormatSerializationOptions", { + get: $util.oneOfGetter($oneOfFields = ["arrowSerializationOptions", "avroSerializationOptions"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions instance + */ + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); + }; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + if (message.arrowSerializationOptions != null && Object.hasOwnProperty.call(message, "arrowSerializationOptions")) + $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.avroSerializationOptions != null && Object.hasOwnProperty.call(message, "avroSerializationOptions")) + $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.encode(message.avroSerializationOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + } + case 2: { + message.rowRestriction = reader.string(); + break; + } + case 3: { + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.decode(reader, reader.uint32()); + break; + } + case 4: { + message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReadOptions message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReadOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + properties.outputFormatSerializationOptions = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.verify(message.arrowSerializationOptions); + if (error) + return "arrowSerializationOptions." + error; + } + } + if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { + if (properties.outputFormatSerializationOptions === 1) + return "outputFormatSerializationOptions: multiple values"; + properties.outputFormatSerializationOptions = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.verify(message.avroSerializationOptions); + if (error) + return "avroSerializationOptions." + error; + } + } + return null; + }; + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} TableReadOptions + */ + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); + } + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); + if (object.arrowSerializationOptions != null) { + if (typeof object.arrowSerializationOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.arrowSerializationOptions: object expected"); + message.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.fromObject(object.arrowSerializationOptions); + } + if (object.avroSerializationOptions != null) { + if (typeof object.avroSerializationOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.avroSerializationOptions: object expected"); + message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.fromObject(object.avroSerializationOptions); + } + return message; + }; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions} message TableReadOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReadOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; + if (message.arrowSerializationOptions != null && message.hasOwnProperty("arrowSerializationOptions")) { + object.arrowSerializationOptions = $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.toObject(message.arrowSerializationOptions, options); + if (options.oneofs) + object.outputFormatSerializationOptions = "arrowSerializationOptions"; + } + if (message.avroSerializationOptions != null && message.hasOwnProperty("avroSerializationOptions")) { + object.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.toObject(message.avroSerializationOptions, options); + if (options.oneofs) + object.outputFormatSerializationOptions = "avroSerializationOptions"; + } + return object; + }; + + /** + * Converts this TableReadOptions to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + * @returns {Object.} JSON object + */ + TableReadOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableReadOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"; + }; + + return TableReadOptions; + })(); + + return ReadSession; + })(); + + v1.ReadStream = (function() { + + /** + * Properties of a ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @interface IReadStream + * @property {string|null} [name] ReadStream name + */ + + /** + * Constructs a new ReadStream. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a ReadStream. + * @implements IReadStream + * @constructor + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + */ + function ReadStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @instance + */ + ReadStream.prototype.name = ""; + + /** + * Creates a new ReadStream instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream instance + */ + ReadStream.create = function create(properties) { + return new ReadStream(properties); + }; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.ReadStream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadStream message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.ReadStream} ReadStream + */ + ReadStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.ReadStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1.ReadStream} message ReadStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this ReadStream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @instance + * @returns {Object.} JSON object + */ + ReadStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.ReadStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadStream"; + }; + + return ReadStream; + })(); + + /** + * WriteStreamView enum. + * @name google.cloud.bigquery.storage.v1.WriteStreamView + * @enum {number} + * @property {number} WRITE_STREAM_VIEW_UNSPECIFIED=0 WRITE_STREAM_VIEW_UNSPECIFIED value + * @property {number} BASIC=1 BASIC value + * @property {number} FULL=2 FULL value + */ + v1.WriteStreamView = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "WRITE_STREAM_VIEW_UNSPECIFIED"] = 0; + values[valuesById[1] = "BASIC"] = 1; + values[valuesById[2] = "FULL"] = 2; + return values; + })(); + + v1.WriteStream = (function() { + + /** + * Properties of a WriteStream. 
+ * @memberof google.cloud.bigquery.storage.v1 + * @interface IWriteStream + * @property {string|null} [name] WriteStream name + * @property {google.cloud.bigquery.storage.v1.WriteStream.Type|null} [type] WriteStream type + * @property {google.protobuf.ITimestamp|null} [createTime] WriteStream createTime + * @property {google.protobuf.ITimestamp|null} [commitTime] WriteStream commitTime + * @property {google.cloud.bigquery.storage.v1.ITableSchema|null} [tableSchema] WriteStream tableSchema + * @property {google.cloud.bigquery.storage.v1.WriteStream.WriteMode|null} [writeMode] WriteStream writeMode + * @property {string|null} [location] WriteStream location + */ + + /** + * Constructs a new WriteStream. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a WriteStream. + * @implements IWriteStream + * @constructor + * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set + */ + function WriteStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * WriteStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.name = ""; + + /** + * WriteStream type. + * @member {google.cloud.bigquery.storage.v1.WriteStream.Type} type + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.type = 0; + + /** + * WriteStream createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.createTime = null; + + /** + * WriteStream commitTime. 
+ * @member {google.protobuf.ITimestamp|null|undefined} commitTime + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.commitTime = null; + + /** + * WriteStream tableSchema. + * @member {google.cloud.bigquery.storage.v1.ITableSchema|null|undefined} tableSchema + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.tableSchema = null; + + /** + * WriteStream writeMode. + * @member {google.cloud.bigquery.storage.v1.WriteStream.WriteMode} writeMode + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.writeMode = 0; + + /** + * WriteStream location. + * @member {string} location + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + */ + WriteStream.prototype.location = ""; + + /** + * Creates a new WriteStream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {google.cloud.bigquery.storage.v1.IWriteStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream instance + */ + WriteStream.create = function create(properties) { + return new WriteStream(properties); + }; + + /** + * Encodes the specified WriteStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + WriteStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.commitTime != null && Object.hasOwnProperty.call(message, "commitTime")) + $root.google.protobuf.Timestamp.encode(message.commitTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.tableSchema != null && Object.hasOwnProperty.call(message, "tableSchema")) + $root.google.cloud.bigquery.storage.v1.TableSchema.encode(message.tableSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.writeMode != null && Object.hasOwnProperty.call(message, "writeMode")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.writeMode); + if (message.location != null && Object.hasOwnProperty.call(message, "location")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.location); + return writer; + }; + + /** + * Encodes the specified WriteStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.WriteStream.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {google.cloud.bigquery.storage.v1.IWriteStream} message WriteStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + WriteStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a WriteStream message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + WriteStream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.int32(); + break; + } + case 3: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 4: { + message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 5: { + message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.decode(reader, reader.uint32()); + break; + } + case 7: { + message.writeMode = reader.int32(); + break; + } + case 8: { + message.location = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a WriteStream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + WriteStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a WriteStream message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + WriteStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.createTime); + if (error) + return "createTime." + error; + } + if (message.commitTime != null && message.hasOwnProperty("commitTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTime); + if (error) + return "commitTime." + error; + } + if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.TableSchema.verify(message.tableSchema); + if (error) + return "tableSchema." + error; + } + if (message.writeMode != null && message.hasOwnProperty("writeMode")) + switch (message.writeMode) { + default: + return "writeMode: enum value expected"; + case 0: + case 1: + break; + } + if (message.location != null && message.hasOwnProperty("location")) + if (!$util.isString(message.location)) + return "location: string expected"; + return null; + }; + + /** + * Creates a WriteStream message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.WriteStream} WriteStream + */ + WriteStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.WriteStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); + if (object.name != null) + message.name = String(object.name); + switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; + case "TYPE_UNSPECIFIED": + case 0: + message.type = 0; + break; + case "COMMITTED": + case 1: + message.type = 1; + break; + case "PENDING": + case 2: + message.type = 2; + break; + case "BUFFERED": + case 3: + message.type = 3; + break; + } + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.commitTime != null) { + if (typeof object.commitTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.commitTime: object expected"); + message.commitTime = $root.google.protobuf.Timestamp.fromObject(object.commitTime); + } + if (object.tableSchema != null) { + if (typeof object.tableSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.WriteStream.tableSchema: object expected"); + message.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.fromObject(object.tableSchema); + } + switch (object.writeMode) { + default: + if (typeof object.writeMode === "number") { + message.writeMode = object.writeMode; + break; + } + break; + case "WRITE_MODE_UNSPECIFIED": + case 0: + message.writeMode = 0; + break; + case "INSERT": + case 1: + message.writeMode = 1; + break; + } + if 
(object.location != null) + message.location = String(object.location); + return message; + }; + + /** + * Creates a plain object from a WriteStream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {google.cloud.bigquery.storage.v1.WriteStream} message WriteStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + WriteStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; + object.createTime = null; + object.commitTime = null; + object.tableSchema = null; + object.writeMode = options.enums === String ? "WRITE_MODE_UNSPECIFIED" : 0; + object.location = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1.WriteStream.Type[message.type] : message.type; + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.commitTime != null && message.hasOwnProperty("commitTime")) + object.commitTime = $root.google.protobuf.Timestamp.toObject(message.commitTime, options); + if (message.tableSchema != null && message.hasOwnProperty("tableSchema")) + object.tableSchema = $root.google.cloud.bigquery.storage.v1.TableSchema.toObject(message.tableSchema, options); + if (message.writeMode != null && message.hasOwnProperty("writeMode")) + object.writeMode = options.enums === String ? 
$root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] === undefined ? message.writeMode : $root.google.cloud.bigquery.storage.v1.WriteStream.WriteMode[message.writeMode] : message.writeMode; + if (message.location != null && message.hasOwnProperty("location")) + object.location = message.location; + return object; + }; + + /** + * Converts this WriteStream to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @instance + * @returns {Object.} JSON object + */ + WriteStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for WriteStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.WriteStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + WriteStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.WriteStream"; + }; + + /** + * Type enum. + * @name google.cloud.bigquery.storage.v1.WriteStream.Type + * @enum {number} + * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value + * @property {number} COMMITTED=1 COMMITTED value + * @property {number} PENDING=2 PENDING value + * @property {number} BUFFERED=3 BUFFERED value + */ + WriteStream.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; + values[valuesById[1] = "COMMITTED"] = 1; + values[valuesById[2] = "PENDING"] = 2; + values[valuesById[3] = "BUFFERED"] = 3; + return values; + })(); + + /** + * WriteMode enum. 
+ * @name google.cloud.bigquery.storage.v1.WriteStream.WriteMode + * @enum {number} + * @property {number} WRITE_MODE_UNSPECIFIED=0 WRITE_MODE_UNSPECIFIED value + * @property {number} INSERT=1 INSERT value + */ + WriteStream.WriteMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "WRITE_MODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "INSERT"] = 1; + return values; + })(); + + return WriteStream; + })(); + + v1.TableSchema = (function() { + + /** + * Properties of a TableSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ITableSchema + * @property {Array.|null} [fields] TableSchema fields + */ + + /** + * Constructs a new TableSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a TableSchema. + * @implements ITableSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set + */ + function TableSchema(properties) { + this.fields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableSchema fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @instance + */ + TableSchema.prototype.fields = $util.emptyArray; + + /** + * Creates a new TableSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema instance + */ + TableSchema.create = function create(properties) { + return new TableSchema(properties); + }; + + /** + * Encodes the specified TableSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + $root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableSchema} message TableSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a TableSchema message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.TableSchema} TableSchema + */ + TableSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.TableSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); + if (object.fields) { + if (!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableSchema.fields: object expected"); + message.fields[i] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a TableSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {google.cloud.bigquery.storage.v1.TableSchema} message TableSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.fields = []; + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); + } + return object; + }; + + /** + * Converts this TableSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @instance + * @returns {Object.} JSON object + */ + TableSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.TableSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableSchema"; + }; + + return TableSchema; + })(); + + v1.TableFieldSchema = (function() { + + /** + * Properties of a TableFieldSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @interface ITableFieldSchema + * @property {string|null} [name] TableFieldSchema name + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null} [type] TableFieldSchema type + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode|null} [mode] TableFieldSchema mode + * @property {Array.|null} [fields] TableFieldSchema fields + * @property {string|null} [description] TableFieldSchema description + * @property {number|Long|null} [maxLength] TableFieldSchema maxLength + * @property {number|Long|null} [precision] TableFieldSchema precision + * @property {number|Long|null} [scale] TableFieldSchema scale + */ + + /** + * Constructs a new TableFieldSchema. + * @memberof google.cloud.bigquery.storage.v1 + * @classdesc Represents a TableFieldSchema. 
+ * @implements ITableFieldSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set + */ + function TableFieldSchema(properties) { + this.fields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableFieldSchema name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.name = ""; + + /** + * TableFieldSchema type. + * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Type} type + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.type = 0; + + /** + * TableFieldSchema mode. + * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Mode} mode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.mode = 0; + + /** + * TableFieldSchema fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.fields = $util.emptyArray; + + /** + * TableFieldSchema description. + * @member {string} description + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.description = ""; + + /** + * TableFieldSchema maxLength. + * @member {number|Long} maxLength + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.maxLength = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * TableFieldSchema precision. + * @member {number|Long} precision + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.precision = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * TableFieldSchema scale. 
+ * @member {number|Long} scale + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.scale = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new TableFieldSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema instance + */ + TableFieldSchema.create = function create(properties) { + return new TableFieldSchema(properties); + }; + + /** + * Encodes the specified TableFieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableFieldSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.type); + if (message.mode != null && Object.hasOwnProperty.call(message, "mode")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.mode); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + $root.google.cloud.bigquery.storage.v1.TableFieldSchema.encode(message.fields[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.description != null && 
Object.hasOwnProperty.call(message, "description")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.description); + if (message.maxLength != null && Object.hasOwnProperty.call(message, "maxLength")) + writer.uint32(/* id 7, wireType 0 =*/56).int64(message.maxLength); + if (message.precision != null && Object.hasOwnProperty.call(message, "precision")) + writer.uint32(/* id 8, wireType 0 =*/64).int64(message.precision); + if (message.scale != null && Object.hasOwnProperty.call(message, "scale")) + writer.uint32(/* id 9, wireType 0 =*/72).int64(message.scale); + return writer; + }; + + /** + * Encodes the specified TableFieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1.ITableFieldSchema} message TableFieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableFieldSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableFieldSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableFieldSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.int32(); + break; + } + case 3: { + message.mode = reader.int32(); + break; + } + case 4: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1.TableFieldSchema.decode(reader, reader.uint32())); + break; + } + case 6: { + message.description = reader.string(); + break; + } + case 7: { + message.maxLength = reader.int64(); + break; + } + case 8: { + message.precision = reader.int64(); + break; + } + case 9: { + message.scale = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableFieldSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableFieldSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableFieldSchema message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableFieldSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + break; + } + if (message.mode != null && message.hasOwnProperty("mode")) + switch (message.mode) { + default: + return "mode: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.verify(message.fields[i]); + if (error) + return "fields." 
+ error; + } + } + if (message.description != null && message.hasOwnProperty("description")) + if (!$util.isString(message.description)) + return "description: string expected"; + if (message.maxLength != null && message.hasOwnProperty("maxLength")) + if (!$util.isInteger(message.maxLength) && !(message.maxLength && $util.isInteger(message.maxLength.low) && $util.isInteger(message.maxLength.high))) + return "maxLength: integer|Long expected"; + if (message.precision != null && message.hasOwnProperty("precision")) + if (!$util.isInteger(message.precision) && !(message.precision && $util.isInteger(message.precision.low) && $util.isInteger(message.precision.high))) + return "precision: integer|Long expected"; + if (message.scale != null && message.hasOwnProperty("scale")) + if (!$util.isInteger(message.scale) && !(message.scale && $util.isInteger(message.scale.low) && $util.isInteger(message.scale.high))) + return "scale: integer|Long expected"; + return null; + }; + + /** + * Creates a TableFieldSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema} TableFieldSchema + */ + TableFieldSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.TableFieldSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); + if (object.name != null) + message.name = String(object.name); + switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; + case "TYPE_UNSPECIFIED": + case 0: + message.type = 0; + break; + case "STRING": + case 1: + message.type = 1; + break; + case "INT64": + case 2: + message.type = 2; + break; + case "DOUBLE": + case 3: + message.type = 3; + break; + case "STRUCT": + case 4: + message.type = 4; + break; + case "BYTES": + case 5: + message.type = 5; + break; + case "BOOL": + case 6: + message.type = 6; + break; + case "TIMESTAMP": + case 7: + message.type = 7; + break; + case "DATE": + case 8: + message.type = 8; + break; + case "TIME": + case 9: + message.type = 9; + break; + case "DATETIME": + case 10: + message.type = 10; + break; + case "GEOGRAPHY": + case 11: + message.type = 11; + break; + case "NUMERIC": + case 12: + message.type = 12; + break; + case "BIGNUMERIC": + case 13: + message.type = 13; + break; + case "INTERVAL": + case 14: + message.type = 14; + break; + case "JSON": + case 15: + message.type = 15; + break; + } + switch (object.mode) { + default: + if (typeof object.mode === "number") { + message.mode = object.mode; + break; + } + break; + case "MODE_UNSPECIFIED": + case 0: + message.mode = 0; + break; + case "NULLABLE": + case 1: + message.mode = 1; + break; + case "REQUIRED": + case 2: + message.mode = 2; + break; + case "REPEATED": + case 3: + message.mode = 3; + break; + } + if (object.fields) { + if 
(!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.fields: object expected"); + message.fields[i] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.fromObject(object.fields[i]); + } + } + if (object.description != null) + message.description = String(object.description); + if (object.maxLength != null) + if ($util.Long) + (message.maxLength = $util.Long.fromValue(object.maxLength)).unsigned = false; + else if (typeof object.maxLength === "string") + message.maxLength = parseInt(object.maxLength, 10); + else if (typeof object.maxLength === "number") + message.maxLength = object.maxLength; + else if (typeof object.maxLength === "object") + message.maxLength = new $util.LongBits(object.maxLength.low >>> 0, object.maxLength.high >>> 0).toNumber(); + if (object.precision != null) + if ($util.Long) + (message.precision = $util.Long.fromValue(object.precision)).unsigned = false; + else if (typeof object.precision === "string") + message.precision = parseInt(object.precision, 10); + else if (typeof object.precision === "number") + message.precision = object.precision; + else if (typeof object.precision === "object") + message.precision = new $util.LongBits(object.precision.low >>> 0, object.precision.high >>> 0).toNumber(); + if (object.scale != null) + if ($util.Long) + (message.scale = $util.Long.fromValue(object.scale)).unsigned = false; + else if (typeof object.scale === "string") + message.scale = parseInt(object.scale, 10); + else if (typeof object.scale === "number") + message.scale = object.scale; + else if (typeof object.scale === "object") + message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object 
from a TableFieldSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema} message TableFieldSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableFieldSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.fields = []; + if (options.defaults) { + object.name = ""; + object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; + object.mode = options.enums === String ? "MODE_UNSPECIFIED" : 0; + object.description = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.maxLength = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.maxLength = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.precision = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.precision = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.scale = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.scale = options.longs === String ? "0" : 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] === undefined ? 
message.type : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; + if (message.mode != null && message.hasOwnProperty("mode")) + object.mode = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] === undefined ? message.mode : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode[message.mode] : message.mode; + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.toObject(message.fields[j], options); + } + if (message.description != null && message.hasOwnProperty("description")) + object.description = message.description; + if (message.maxLength != null && message.hasOwnProperty("maxLength")) + if (typeof message.maxLength === "number") + object.maxLength = options.longs === String ? String(message.maxLength) : message.maxLength; + else + object.maxLength = options.longs === String ? $util.Long.prototype.toString.call(message.maxLength) : options.longs === Number ? new $util.LongBits(message.maxLength.low >>> 0, message.maxLength.high >>> 0).toNumber() : message.maxLength; + if (message.precision != null && message.hasOwnProperty("precision")) + if (typeof message.precision === "number") + object.precision = options.longs === String ? String(message.precision) : message.precision; + else + object.precision = options.longs === String ? $util.Long.prototype.toString.call(message.precision) : options.longs === Number ? new $util.LongBits(message.precision.low >>> 0, message.precision.high >>> 0).toNumber() : message.precision; + if (message.scale != null && message.hasOwnProperty("scale")) + if (typeof message.scale === "number") + object.scale = options.longs === String ? String(message.scale) : message.scale; + else + object.scale = options.longs === String ? 
$util.Long.prototype.toString.call(message.scale) : options.longs === Number ? new $util.LongBits(message.scale.low >>> 0, message.scale.high >>> 0).toNumber() : message.scale; + return object; + }; + + /** + * Converts this TableFieldSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + * @returns {Object.} JSON object + */ + TableFieldSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableFieldSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableFieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableFieldSchema"; + }; + + /** + * Type enum. 
+ * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Type + * @enum {number} + * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value + * @property {number} STRING=1 STRING value + * @property {number} INT64=2 INT64 value + * @property {number} DOUBLE=3 DOUBLE value + * @property {number} STRUCT=4 STRUCT value + * @property {number} BYTES=5 BYTES value + * @property {number} BOOL=6 BOOL value + * @property {number} TIMESTAMP=7 TIMESTAMP value + * @property {number} DATE=8 DATE value + * @property {number} TIME=9 TIME value + * @property {number} DATETIME=10 DATETIME value + * @property {number} GEOGRAPHY=11 GEOGRAPHY value + * @property {number} NUMERIC=12 NUMERIC value + * @property {number} BIGNUMERIC=13 BIGNUMERIC value + * @property {number} INTERVAL=14 INTERVAL value + * @property {number} JSON=15 JSON value + */ + TableFieldSchema.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "TYPE_UNSPECIFIED"] = 0; + values[valuesById[1] = "STRING"] = 1; + values[valuesById[2] = "INT64"] = 2; + values[valuesById[3] = "DOUBLE"] = 3; + values[valuesById[4] = "STRUCT"] = 4; + values[valuesById[5] = "BYTES"] = 5; + values[valuesById[6] = "BOOL"] = 6; + values[valuesById[7] = "TIMESTAMP"] = 7; + values[valuesById[8] = "DATE"] = 8; + values[valuesById[9] = "TIME"] = 9; + values[valuesById[10] = "DATETIME"] = 10; + values[valuesById[11] = "GEOGRAPHY"] = 11; + values[valuesById[12] = "NUMERIC"] = 12; + values[valuesById[13] = "BIGNUMERIC"] = 13; + values[valuesById[14] = "INTERVAL"] = 14; + values[valuesById[15] = "JSON"] = 15; + return values; + })(); + + /** + * Mode enum. 
+ * @name google.cloud.bigquery.storage.v1.TableFieldSchema.Mode + * @enum {number} + * @property {number} MODE_UNSPECIFIED=0 MODE_UNSPECIFIED value + * @property {number} NULLABLE=1 NULLABLE value + * @property {number} REQUIRED=2 REQUIRED value + * @property {number} REPEATED=3 REPEATED value + */ + TableFieldSchema.Mode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MODE_UNSPECIFIED"] = 0; + values[valuesById[1] = "NULLABLE"] = 1; + values[valuesById[2] = "REQUIRED"] = 2; + values[valuesById[3] = "REPEATED"] = 3; + return values; + })(); + + return TableFieldSchema; + })(); + + return v1; + })(); + + storage.v1beta1 = (function() { + + /** + * Namespace v1beta1. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1beta1 = {}; + + v1beta1.ArrowSchema = (function() { + + /** + * Properties of an ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IArrowSchema + * @property {Uint8Array|null} [serializedSchema] ArrowSchema serializedSchema + */ + + /** + * Constructs a new ArrowSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an ArrowSchema. + * @implements IArrowSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + */ + function ArrowSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowSchema serializedSchema. + * @member {Uint8Array} serializedSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @instance + */ + ArrowSchema.prototype.serializedSchema = $util.newBuffer([]); + + /** + * Creates a new ArrowSchema instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema instance + */ + ArrowSchema.create = function create(properties) { + return new ArrowSchema(properties); + }; + + /** + * Encodes the specified ArrowSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedSchema != null && Object.hasOwnProperty.call(message, "serializedSchema")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedSchema); + return writer; + }; + + /** + * Encodes the specified ArrowSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowSchema} message ArrowSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedSchema = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowSchema message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + if (!(message.serializedSchema && typeof message.serializedSchema.length === "number" || $util.isString(message.serializedSchema))) + return "serializedSchema: buffer expected"; + return null; + }; + + /** + * Creates an ArrowSchema message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowSchema} ArrowSchema + */ + ArrowSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); + if (object.serializedSchema != null) + if (typeof object.serializedSchema === "string") + $util.base64.decode(object.serializedSchema, message.serializedSchema = $util.newBuffer($util.base64.length(object.serializedSchema)), 0); + else if (object.serializedSchema.length >= 0) + message.serializedSchema = object.serializedSchema; + return message; + }; + + /** + * Creates a plain object from an ArrowSchema message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ArrowSchema} message ArrowSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if (options.bytes === String) + object.serializedSchema = ""; + else { + object.serializedSchema = []; + if (options.bytes !== Array) + object.serializedSchema = $util.newBuffer(object.serializedSchema); + } + if (message.serializedSchema != null && message.hasOwnProperty("serializedSchema")) + object.serializedSchema = options.bytes === String ? $util.base64.encode(message.serializedSchema, 0, message.serializedSchema.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedSchema) : message.serializedSchema; + return object; + }; + + /** + * Converts this ArrowSchema to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @instance + * @returns {Object.} JSON object + */ + ArrowSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowSchema"; + }; + + return ArrowSchema; + })(); + + v1beta1.ArrowRecordBatch = (function() { + + /** + * Properties of an ArrowRecordBatch. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IArrowRecordBatch + * @property {Uint8Array|null} [serializedRecordBatch] ArrowRecordBatch serializedRecordBatch + * @property {number|Long|null} [rowCount] ArrowRecordBatch rowCount + */ + + /** + * Constructs a new ArrowRecordBatch. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an ArrowRecordBatch. + * @implements IArrowRecordBatch + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + */ + function ArrowRecordBatch(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowRecordBatch serializedRecordBatch. + * @member {Uint8Array} serializedRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.serializedRecordBatch = $util.newBuffer([]); + + /** + * ArrowRecordBatch rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + */ + ArrowRecordBatch.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ArrowRecordBatch instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch instance + */ + ArrowRecordBatch.create = function create(properties) { + return new ArrowRecordBatch(properties); + }; + + /** + * Encodes the specified ArrowRecordBatch message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedRecordBatch != null && Object.hasOwnProperty.call(message, "serializedRecordBatch")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedRecordBatch); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ArrowRecordBatch message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch} message ArrowRecordBatch message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowRecordBatch.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedRecordBatch = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowRecordBatch message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowRecordBatch.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowRecordBatch message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowRecordBatch.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + if (!(message.serializedRecordBatch && typeof message.serializedRecordBatch.length === "number" || $util.isString(message.serializedRecordBatch))) + return "serializedRecordBatch: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an ArrowRecordBatch message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} ArrowRecordBatch + */ + ArrowRecordBatch.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); + if (object.serializedRecordBatch != null) + if (typeof object.serializedRecordBatch === "string") + $util.base64.decode(object.serializedRecordBatch, message.serializedRecordBatch = $util.newBuffer($util.base64.length(object.serializedRecordBatch)), 0); + else if (object.serializedRecordBatch.length >= 0) + message.serializedRecordBatch = object.serializedRecordBatch; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an ArrowRecordBatch message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch} message ArrowRecordBatch + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowRecordBatch.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedRecordBatch = ""; + else { + object.serializedRecordBatch = []; + if (options.bytes !== Array) + object.serializedRecordBatch = $util.newBuffer(object.serializedRecordBatch); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedRecordBatch != null && message.hasOwnProperty("serializedRecordBatch")) + object.serializedRecordBatch = options.bytes === String ? $util.base64.encode(message.serializedRecordBatch, 0, message.serializedRecordBatch.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedRecordBatch) : message.serializedRecordBatch; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ArrowRecordBatch to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @instance + * @returns {Object.} JSON object + */ + ArrowRecordBatch.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowRecordBatch + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowRecordBatch.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch"; + }; + + return ArrowRecordBatch; + })(); + + v1beta1.AvroSchema = (function() { + + /** + * Properties of an AvroSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IAvroSchema + * @property {string|null} [schema] AvroSchema schema + */ + + /** + * Constructs a new AvroSchema. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an AvroSchema. + * @implements IAvroSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + */ + function AvroSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroSchema schema. + * @member {string} schema + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @instance + */ + AvroSchema.prototype.schema = ""; + + /** + * Creates a new AvroSchema instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema instance + */ + AvroSchema.create = function create(properties) { + return new AvroSchema(properties); + }; + + /** + * Encodes the specified AvroSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.schema != null && Object.hasOwnProperty.call(message, "schema")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.schema); + return writer; + }; + + /** + * Encodes the specified AvroSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroSchema.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroSchema} message AvroSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.schema = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroSchema message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroSchema message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.schema != null && message.hasOwnProperty("schema")) + if (!$util.isString(message.schema)) + return "schema: string expected"; + return null; + }; + + /** + * Creates an AvroSchema message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.AvroSchema} AvroSchema + */ + AvroSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); + if (object.schema != null) + message.schema = String(object.schema); + return message; + }; + + /** + * Creates a plain object from an AvroSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta1.AvroSchema} message AvroSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.schema = ""; + if (message.schema != null && message.hasOwnProperty("schema")) + object.schema = message.schema; + return object; + }; + + /** + * Converts this AvroSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @instance + * @returns {Object.} JSON object + */ + AvroSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AvroSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.AvroSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroSchema"; + }; + + return AvroSchema; + })(); + + v1beta1.AvroRows = (function() { + + /** + * Properties of an AvroRows. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IAvroRows + * @property {Uint8Array|null} [serializedBinaryRows] AvroRows serializedBinaryRows + * @property {number|Long|null} [rowCount] AvroRows rowCount + */ + + /** + * Constructs a new AvroRows. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents an AvroRows. + * @implements IAvroRows + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + */ + function AvroRows(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AvroRows serializedBinaryRows. + * @member {Uint8Array} serializedBinaryRows + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + */ + AvroRows.prototype.serializedBinaryRows = $util.newBuffer([]); + + /** + * AvroRows rowCount. 
+ * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + */ + AvroRows.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new AvroRows instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows instance + */ + AvroRows.create = function create(properties) { + return new AvroRows(properties); + }; + + /** + * Encodes the specified AvroRows message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.serializedBinaryRows != null && Object.hasOwnProperty.call(message, "serializedBinaryRows")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.serializedBinaryRows); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified AvroRows message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.AvroRows.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IAvroRows} message AvroRows message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AvroRows.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.serializedBinaryRows = reader.bytes(); + break; + } + case 2: { + message.rowCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an AvroRows message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AvroRows.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an AvroRows message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AvroRows.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + if (!(message.serializedBinaryRows && typeof message.serializedBinaryRows.length === "number" || $util.isString(message.serializedBinaryRows))) + return "serializedBinaryRows: buffer expected"; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + return null; + }; + + /** + * Creates an AvroRows message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.AvroRows} AvroRows + */ + AvroRows.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.AvroRows) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); + if (object.serializedBinaryRows != null) + if (typeof object.serializedBinaryRows === "string") + $util.base64.decode(object.serializedBinaryRows, message.serializedBinaryRows = $util.newBuffer($util.base64.length(object.serializedBinaryRows)), 0); + else if (object.serializedBinaryRows.length >= 0) + message.serializedBinaryRows = object.serializedBinaryRows; + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an AvroRows message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {google.cloud.bigquery.storage.v1beta1.AvroRows} message AvroRows + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + AvroRows.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if (options.bytes === String) + object.serializedBinaryRows = ""; + else { + object.serializedBinaryRows = []; + if (options.bytes !== Array) + object.serializedBinaryRows = $util.newBuffer(object.serializedBinaryRows); + } + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? "0" : 0; + } + if (message.serializedBinaryRows != null && message.hasOwnProperty("serializedBinaryRows")) + object.serializedBinaryRows = options.bytes === String ? $util.base64.encode(message.serializedBinaryRows, 0, message.serializedBinaryRows.length) : options.bytes === Array ? Array.prototype.slice.call(message.serializedBinaryRows) : message.serializedBinaryRows; + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this AvroRows to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @instance + * @returns {Object.} JSON object + */ + AvroRows.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for AvroRows + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.AvroRows + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + AvroRows.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.AvroRows"; + }; + + return AvroRows; + })(); + + v1beta1.TableReadOptions = (function() { + + /** + * Properties of a TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableReadOptions + * @property {Array.|null} [selectedFields] TableReadOptions selectedFields + * @property {string|null} [rowRestriction] TableReadOptions rowRestriction + */ + + /** + * Constructs a new TableReadOptions. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableReadOptions. + * @implements ITableReadOptions + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + */ + function TableReadOptions(properties) { + this.selectedFields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReadOptions selectedFields. + * @member {Array.} selectedFields + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + */ + TableReadOptions.prototype.selectedFields = $util.emptyArray; + + /** + * TableReadOptions rowRestriction. 
+ * @member {string} rowRestriction + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + */ + TableReadOptions.prototype.rowRestriction = ""; + + /** + * Creates a new TableReadOptions instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions instance + */ + TableReadOptions.create = function create(properties) { + return new TableReadOptions(properties); + }; + + /** + * Encodes the specified TableReadOptions message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selectedFields != null && message.selectedFields.length) + for (var i = 0; i < message.selectedFields.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selectedFields[i]); + if (message.rowRestriction != null && Object.hasOwnProperty.call(message, "rowRestriction")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.rowRestriction); + return writer; + }; + + /** + * Encodes the specified TableReadOptions message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReadOptions} message TableReadOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReadOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.selectedFields && message.selectedFields.length)) + message.selectedFields = []; + message.selectedFields.push(reader.string()); + break; + } + case 2: { + message.rowRestriction = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReadOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReadOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReadOptions message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReadOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.selectedFields != null && message.hasOwnProperty("selectedFields")) { + if (!Array.isArray(message.selectedFields)) + return "selectedFields: array expected"; + for (var i = 0; i < message.selectedFields.length; ++i) + if (!$util.isString(message.selectedFields[i])) + return "selectedFields: string[] expected"; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + if (!$util.isString(message.rowRestriction)) + return "rowRestriction: string expected"; + return null; + }; + + /** + * Creates a TableReadOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableReadOptions} TableReadOptions + */ + TableReadOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); + if (object.selectedFields) { + if (!Array.isArray(object.selectedFields)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableReadOptions.selectedFields: array expected"); + message.selectedFields = []; + for (var i = 0; i < object.selectedFields.length; ++i) + message.selectedFields[i] = String(object.selectedFields[i]); + } + if (object.rowRestriction != null) + message.rowRestriction = String(object.rowRestriction); + return message; + }; + + /** + * Creates a plain object from a TableReadOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} message TableReadOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReadOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.selectedFields = []; + if (options.defaults) + object.rowRestriction = ""; + if (message.selectedFields && message.selectedFields.length) { + object.selectedFields = []; + for (var j = 0; j < message.selectedFields.length; ++j) + object.selectedFields[j] = message.selectedFields[j]; + } + if (message.rowRestriction != null && message.hasOwnProperty("rowRestriction")) + object.rowRestriction = message.rowRestriction; + return object; + }; + + /** + * Converts this TableReadOptions to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @instance + * @returns {Object.} JSON object + */ + TableReadOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableReadOptions + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableReadOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReadOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableReadOptions"; + }; + + return TableReadOptions; + })(); + + v1beta1.BigQueryStorage = (function() { + + /** + * Constructs a new BigQueryStorage service. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BigQueryStorage + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function BigQueryStorage(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (BigQueryStorage.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = BigQueryStorage; + + /** + * Creates new BigQueryStorage service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {BigQueryStorage} RPC service. Useful where requests and/or responses are streamed. + */ + BigQueryStorage.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|createReadSession}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef CreateReadSessionCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} [response] ReadSession + */ + + /** + * Calls CreateReadSession. 
+ * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSessionCallback} callback Node-style callback called with the error, if any, and ReadSession + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.createReadSession = function createReadSession(request, callback) { + return this.rpcCall(createReadSession, $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadSession, request, callback); + }, "name", { value: "CreateReadSession" }); + + /** + * Calls CreateReadSession. + * @function createReadSession + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} request CreateReadSessionRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|readRows}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef ReadRowsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} [response] ReadRowsResponse + */ + + /** + * Calls ReadRows. 
+ * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRowsCallback} callback Node-style callback called with the error, if any, and ReadRowsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.readRows = function readRows(request, callback) { + return this.rpcCall(readRows, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest, $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, request, callback); + }, "name", { value: "ReadRows" }); + + /** + * Calls ReadRows. + * @function readRows + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} request ReadRowsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|batchCreateReadSessionStreams}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef BatchCreateReadSessionStreamsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} [response] BatchCreateReadSessionStreamsResponse + */ + + /** + * Calls BatchCreateReadSessionStreams. 
+ * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreamsCallback} callback Node-style callback called with the error, if any, and BatchCreateReadSessionStreamsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.batchCreateReadSessionStreams = function batchCreateReadSessionStreams(request, callback) { + return this.rpcCall(batchCreateReadSessionStreams, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest, $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse, request, callback); + }, "name", { value: "BatchCreateReadSessionStreams" }); + + /** + * Calls BatchCreateReadSessionStreams. + * @function batchCreateReadSessionStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} request BatchCreateReadSessionStreamsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|finalizeStream}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef FinalizeStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.protobuf.Empty} [response] Empty + */ + + /** + * Calls FinalizeStream. 
+ * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStreamCallback} callback Node-style callback called with the error, if any, and Empty + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.finalizeStream = function finalizeStream(request, callback) { + return this.rpcCall(finalizeStream, $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest, $root.google.protobuf.Empty, request, callback); + }, "name", { value: "FinalizeStream" }); + + /** + * Calls FinalizeStream. + * @function finalizeStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} request FinalizeStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta1.BigQueryStorage|splitReadStream}. + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @typedef SplitReadStreamCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} [response] SplitReadStreamResponse + */ + + /** + * Calls SplitReadStream. 
+ * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStreamCallback} callback Node-style callback called with the error, if any, and SplitReadStreamResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(BigQueryStorage.prototype.splitReadStream = function splitReadStream(request, callback) { + return this.rpcCall(splitReadStream, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest, $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse, request, callback); + }, "name", { value: "SplitReadStream" }); + + /** + * Calls SplitReadStream. + * @function splitReadStream + * @memberof google.cloud.bigquery.storage.v1beta1.BigQueryStorage + * @instance + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} request SplitReadStreamRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return BigQueryStorage; + })(); + + v1beta1.Stream = (function() { + + /** + * Properties of a Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStream + * @property {string|null} [name] Stream name + */ + + /** + * Constructs a new Stream. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Stream. + * @implements IStream + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + */ + function Stream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Stream name. 
+ * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @instance + */ + Stream.prototype.name = ""; + + /** + * Creates a new Stream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream instance + */ + Stream.create = function create(properties) { + return new Stream(properties); + }; + + /** + * Encodes the specified Stream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Stream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified Stream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Stream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStream} message Stream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Stream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Stream message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Stream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Stream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Stream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Stream message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Stream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a Stream message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.Stream} Stream + */ + Stream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Stream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a Stream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {google.cloud.bigquery.storage.v1beta1.Stream} message Stream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Stream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this Stream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @instance + * @returns {Object.} JSON object + */ + Stream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Stream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.Stream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Stream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Stream"; + }; + + return Stream; + })(); + + v1beta1.StreamPosition = (function() { + + /** + * Properties of a StreamPosition. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStreamPosition + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] StreamPosition stream + * @property {number|Long|null} [offset] StreamPosition offset + */ + + /** + * Constructs a new StreamPosition. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a StreamPosition. + * @implements IStreamPosition + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + */ + function StreamPosition(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamPosition stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + */ + StreamPosition.prototype.stream = null; + + /** + * StreamPosition offset. 
+ * @member {number|Long} offset + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + */ + StreamPosition.prototype.offset = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new StreamPosition instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition instance + */ + StreamPosition.create = function create(properties) { + return new StreamPosition(properties); + }; + + /** + * Encodes the specified StreamPosition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamPosition.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.offset != null && Object.hasOwnProperty.call(message, "offset")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.offset); + return writer; + }; + + /** + * Encodes the specified StreamPosition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamPosition.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamPosition} message StreamPosition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamPosition.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamPosition message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamPosition.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.offset = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamPosition message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamPosition.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamPosition message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamPosition.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + if (error) + return "stream." + error; + } + if (message.offset != null && message.hasOwnProperty("offset")) + if (!$util.isInteger(message.offset) && !(message.offset && $util.isInteger(message.offset.low) && $util.isInteger(message.offset.high))) + return "offset: integer|Long expected"; + return null; + }; + + /** + * Creates a StreamPosition message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.StreamPosition} StreamPosition + */ + StreamPosition.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamPosition) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamPosition.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + } + if (object.offset != null) + if ($util.Long) + (message.offset = $util.Long.fromValue(object.offset)).unsigned = false; + else if (typeof object.offset === "string") + message.offset = parseInt(object.offset, 10); + else if (typeof object.offset === "number") + message.offset = object.offset; + else if (typeof object.offset === "object") + message.offset = new $util.LongBits(object.offset.low >>> 0, object.offset.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a StreamPosition message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} message StreamPosition + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamPosition.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.stream = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.offset = options.longs === String ? long.toString() : options.longs === Number ? 
long.toNumber() : long; + } else + object.offset = options.longs === String ? "0" : 0; + } + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + if (message.offset != null && message.hasOwnProperty("offset")) + if (typeof message.offset === "number") + object.offset = options.longs === String ? String(message.offset) : message.offset; + else + object.offset = options.longs === String ? $util.Long.prototype.toString.call(message.offset) : options.longs === Number ? new $util.LongBits(message.offset.low >>> 0, message.offset.high >>> 0).toNumber() : message.offset; + return object; + }; + + /** + * Converts this StreamPosition to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @instance + * @returns {Object.} JSON object + */ + StreamPosition.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamPosition + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.StreamPosition + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamPosition.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamPosition"; + }; + + return StreamPosition; + })(); + + v1beta1.ReadSession = (function() { + + /** + * Properties of a ReadSession. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadSession + * @property {string|null} [name] ReadSession name + * @property {google.protobuf.ITimestamp|null} [expireTime] ReadSession expireTime + * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadSession avroSchema + * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadSession arrowSchema + * @property {Array.|null} [streams] ReadSession streams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] ReadSession tableReference + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] ReadSession tableModifiers + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] ReadSession shardingStrategy + */ + + /** + * Constructs a new ReadSession. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadSession. + * @implements IReadSession + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set + */ + function ReadSession(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadSession name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.name = ""; + + /** + * ReadSession expireTime. + * @member {google.protobuf.ITimestamp|null|undefined} expireTime + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.expireTime = null; + + /** + * ReadSession avroSchema. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.avroSchema = null; + + /** + * ReadSession arrowSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.arrowSchema = null; + + /** + * ReadSession streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.streams = $util.emptyArray; + + /** + * ReadSession tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableReference = null; + + /** + * ReadSession tableModifiers. + * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.tableModifiers = null; + + /** + * ReadSession shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + ReadSession.prototype.shardingStrategy = 0; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadSession schema. + * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + */ + Object.defineProperty(ReadSession.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadSession instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession instance + */ + ReadSession.create = function create(properties) { + return new ReadSession(properties); + }; + + /** + * Encodes the specified ReadSession message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.expireTime != null && Object.hasOwnProperty.call(message, "expireTime")) + $root.google.protobuf.Timestamp.encode(message.expireTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 6, wireType 2 
=*/50).fork()).ldelim(); + if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.shardingStrategy); + return writer; + }; + + /** + * Encodes the specified ReadSession message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadSession.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadSession} message ReadSession message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadSession.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.expireTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 5: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 6: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + case 4: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + } + case 7: { + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + } + case 8: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + } + case 9: { + message.shardingStrategy = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadSession message from the specified reader or buffer, length 
delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadSession.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadSession message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadSession.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.expireTime); + if (error) + return "expireTime." + error; + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." 
+ error; + } + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (error) + return "tableReference." + error; + } + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates a ReadSession message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadSession} ReadSession + */ + ReadSession.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadSession) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); + if (object.name != null) + message.name = String(object.name); + if (object.expireTime != null) { + if (typeof object.expireTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.expireTime: object expected"); + message.expireTime = $root.google.protobuf.Timestamp.fromObject(object.expireTime); + } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); + } + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadSession.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + switch (object.shardingStrategy) { + default: + if (typeof object.shardingStrategy === "number") { + message.shardingStrategy = object.shardingStrategy; + break; + } + break; + case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case "BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from a ReadSession message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} message ReadSession + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadSession.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (options.defaults) { + object.name = ""; + object.expireTime = null; + object.tableReference = null; + object.tableModifiers = null; + object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.expireTime != null && message.hasOwnProperty("expireTime")) + object.expireTime = $root.google.protobuf.Timestamp.toObject(message.expireTime, options); + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + return object; + }; + + /** + * Converts this ReadSession to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @instance + * @returns {Object.} JSON object + */ + ReadSession.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadSession + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadSession + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadSession.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadSession"; + }; + + return ReadSession; + })(); + + v1beta1.CreateReadSessionRequest = (function() { + + /** + * Properties of a CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ICreateReadSessionRequest + * @property {google.cloud.bigquery.storage.v1beta1.ITableReference|null} [tableReference] CreateReadSessionRequest tableReference + * @property {string|null} [parent] CreateReadSessionRequest parent + * @property {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null} [tableModifiers] CreateReadSessionRequest tableModifiers + * @property {number|null} [requestedStreams] CreateReadSessionRequest requestedStreams + * @property {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null} [readOptions] CreateReadSessionRequest readOptions + * @property {google.cloud.bigquery.storage.v1beta1.DataFormat|null} [format] CreateReadSessionRequest format + * @property {google.cloud.bigquery.storage.v1beta1.ShardingStrategy|null} [shardingStrategy] CreateReadSessionRequest shardingStrategy + */ + + /** + * Constructs a new CreateReadSessionRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a CreateReadSessionRequest. 
+ * @implements ICreateReadSessionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + */ + function CreateReadSessionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateReadSessionRequest tableReference. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReference|null|undefined} tableReference + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.tableReference = null; + + /** + * CreateReadSessionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.parent = ""; + + /** + * CreateReadSessionRequest tableModifiers. + * @member {google.cloud.bigquery.storage.v1beta1.ITableModifiers|null|undefined} tableModifiers + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.tableModifiers = null; + + /** + * CreateReadSessionRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.requestedStreams = 0; + + /** + * CreateReadSessionRequest readOptions. + * @member {google.cloud.bigquery.storage.v1beta1.ITableReadOptions|null|undefined} readOptions + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.readOptions = null; + + /** + * CreateReadSessionRequest format. 
+ * @member {google.cloud.bigquery.storage.v1beta1.DataFormat} format + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.format = 0; + + /** + * CreateReadSessionRequest shardingStrategy. + * @member {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} shardingStrategy + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + */ + CreateReadSessionRequest.prototype.shardingStrategy = 0; + + /** + * Creates a new CreateReadSessionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest instance + */ + CreateReadSessionRequest.create = function create(properties) { + return new CreateReadSessionRequest(properties); + }; + + /** + * Encodes the specified CreateReadSessionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.tableReference != null && Object.hasOwnProperty.call(message, "tableReference")) + $root.google.cloud.bigquery.storage.v1beta1.TableReference.encode(message.tableReference, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.tableModifiers != null && Object.hasOwnProperty.call(message, "tableModifiers")) + $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.encode(message.tableModifiers, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.requestedStreams); + if (message.readOptions != null && Object.hasOwnProperty.call(message, "readOptions")) + $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.encode(message.readOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.format != null && Object.hasOwnProperty.call(message, "format")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.format); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.parent); + if (message.shardingStrategy != null && Object.hasOwnProperty.call(message, "shardingStrategy")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.shardingStrategy); + return writer; + }; + + /** + * Encodes the specified CreateReadSessionRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest} message CreateReadSessionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateReadSessionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); + break; + } + case 6: { + message.parent = reader.string(); + break; + } + case 2: { + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.decode(reader, reader.uint32()); + break; + } + case 3: { + message.requestedStreams = reader.int32(); + break; + } + case 4: { + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.decode(reader, reader.uint32()); + break; + } + case 5: { + message.format = reader.int32(); + break; + } + case 7: { + message.shardingStrategy = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateReadSessionRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateReadSessionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateReadSessionRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateReadSessionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.tableReference != null && message.hasOwnProperty("tableReference")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReference.verify(message.tableReference); + if (error) + return "tableReference." + error; + } + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.verify(message.tableModifiers); + if (error) + return "tableModifiers." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.verify(message.readOptions); + if (error) + return "readOptions." + error; + } + if (message.format != null && message.hasOwnProperty("format")) + switch (message.format) { + default: + return "format: enum value expected"; + case 0: + case 1: + case 3: + break; + } + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + switch (message.shardingStrategy) { + default: + return "shardingStrategy: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates a CreateReadSessionRequest message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} CreateReadSessionRequest + */ + CreateReadSessionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); + if (object.tableReference != null) { + if (typeof object.tableReference !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableReference: object expected"); + message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.fromObject(object.tableReference); + } + if (object.parent != null) + message.parent = String(object.parent); + if (object.tableModifiers != null) { + if (typeof object.tableModifiers !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.tableModifiers: object expected"); + message.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.fromObject(object.tableModifiers); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + if (object.readOptions != null) { + if (typeof object.readOptions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest.readOptions: object expected"); + message.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.fromObject(object.readOptions); + } + switch (object.format) { + default: + if (typeof object.format === "number") { + message.format = object.format; + break; + } + break; + case "DATA_FORMAT_UNSPECIFIED": + case 0: + message.format = 0; + break; + case "AVRO": + case 1: + message.format = 1; 
+ break; + case "ARROW": + case 3: + message.format = 3; + break; + } + switch (object.shardingStrategy) { + default: + if (typeof object.shardingStrategy === "number") { + message.shardingStrategy = object.shardingStrategy; + break; + } + break; + case "SHARDING_STRATEGY_UNSPECIFIED": + case 0: + message.shardingStrategy = 0; + break; + case "LIQUID": + case 1: + message.shardingStrategy = 1; + break; + case "BALANCED": + case 2: + message.shardingStrategy = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from a CreateReadSessionRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest} message CreateReadSessionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateReadSessionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.tableReference = null; + object.tableModifiers = null; + object.requestedStreams = 0; + object.readOptions = null; + object.format = options.enums === String ? "DATA_FORMAT_UNSPECIFIED" : 0; + object.parent = ""; + object.shardingStrategy = options.enums === String ? 
"SHARDING_STRATEGY_UNSPECIFIED" : 0; + } + if (message.tableReference != null && message.hasOwnProperty("tableReference")) + object.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.toObject(message.tableReference, options); + if (message.tableModifiers != null && message.hasOwnProperty("tableModifiers")) + object.tableModifiers = $root.google.cloud.bigquery.storage.v1beta1.TableModifiers.toObject(message.tableModifiers, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + if (message.readOptions != null && message.hasOwnProperty("readOptions")) + object.readOptions = $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions.toObject(message.readOptions, options); + if (message.format != null && message.hasOwnProperty("format")) + object.format = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] === undefined ? message.format : $root.google.cloud.bigquery.storage.v1beta1.DataFormat[message.format] : message.format; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.shardingStrategy != null && message.hasOwnProperty("shardingStrategy")) + object.shardingStrategy = options.enums === String ? $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] === undefined ? message.shardingStrategy : $root.google.cloud.bigquery.storage.v1beta1.ShardingStrategy[message.shardingStrategy] : message.shardingStrategy; + return object; + }; + + /** + * Converts this CreateReadSessionRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateReadSessionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CreateReadSessionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateReadSessionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest"; + }; + + return CreateReadSessionRequest; + })(); + + /** + * DataFormat enum. + * @name google.cloud.bigquery.storage.v1beta1.DataFormat + * @enum {number} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=3 ARROW value + */ + v1beta1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[3] = "ARROW"] = 3; + return values; + })(); + + /** + * ShardingStrategy enum. 
+ * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy + * @enum {number} + * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value + * @property {number} LIQUID=1 LIQUID value + * @property {number} BALANCED=2 BALANCED value + */ + v1beta1.ShardingStrategy = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; + values[valuesById[1] = "LIQUID"] = 1; + values[valuesById[2] = "BALANCED"] = 2; + return values; + })(); + + v1beta1.ReadRowsRequest = (function() { + + /** + * Properties of a ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null} [readPosition] ReadRowsRequest readPosition + */ + + /** + * Constructs a new ReadRowsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsRequest. + * @implements IReadRowsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + */ + function ReadRowsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsRequest readPosition. + * @member {google.cloud.bigquery.storage.v1beta1.IStreamPosition|null|undefined} readPosition + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + */ + ReadRowsRequest.prototype.readPosition = null; + + /** + * Creates a new ReadRowsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest instance + */ + ReadRowsRequest.create = function create(properties) { + return new ReadRowsRequest(properties); + }; + + /** + * Encodes the specified ReadRowsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.readPosition != null && Object.hasOwnProperty.call(message, "readPosition")) + $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.encode(message.readPosition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ReadRowsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsRequest} message ReadRowsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.verify(message.readPosition); + if (error) + return "readPosition." + error; + } + return null; + }; + + /** + * Creates a ReadRowsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} ReadRowsRequest + */ + ReadRowsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); + if (object.readPosition != null) { + if (typeof object.readPosition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsRequest.readPosition: object expected"); + message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.fromObject(object.readPosition); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsRequest} message ReadRowsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.readPosition = null; + if (message.readPosition != null && message.hasOwnProperty("readPosition")) + object.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.toObject(message.readPosition, options); + return object; + }; + + /** + * Converts this ReadRowsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @instance + * @returns {Object.} JSON object + */ + ReadRowsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadRowsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsRequest"; + }; + + return ReadRowsRequest; + })(); + + v1beta1.StreamStatus = (function() { + + /** + * Properties of a StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IStreamStatus + * @property {number|Long|null} [estimatedRowCount] StreamStatus estimatedRowCount + * @property {number|null} [fractionConsumed] StreamStatus fractionConsumed + * @property {google.cloud.bigquery.storage.v1beta1.IProgress|null} [progress] StreamStatus progress + * @property {boolean|null} [isSplittable] StreamStatus isSplittable + */ + + /** + * Constructs a new StreamStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a StreamStatus. + * @implements IStreamStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + */ + function StreamStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamStatus estimatedRowCount. 
+ * @member {number|Long} estimatedRowCount + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * StreamStatus fractionConsumed. + * @member {number} fractionConsumed + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.fractionConsumed = 0; + + /** + * StreamStatus progress. + * @member {google.cloud.bigquery.storage.v1beta1.IProgress|null|undefined} progress + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.progress = null; + + /** + * StreamStatus isSplittable. + * @member {boolean} isSplittable + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + */ + StreamStatus.prototype.isSplittable = false; + + /** + * Creates a new StreamStatus instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus instance + */ + StreamStatus.create = function create(properties) { + return new StreamStatus(properties); + }; + + /** + * Encodes the specified StreamStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.estimatedRowCount); + if (message.fractionConsumed != null && Object.hasOwnProperty.call(message, "fractionConsumed")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fractionConsumed); + if (message.isSplittable != null && Object.hasOwnProperty.call(message, "isSplittable")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isSplittable); + if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) + $root.google.cloud.bigquery.storage.v1beta1.Progress.encode(message.progress, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.StreamStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IStreamStatus} message StreamStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.estimatedRowCount = reader.int64(); + break; + } + case 2: { + message.fractionConsumed = reader.float(); + break; + } + case 4: { + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.decode(reader, reader.uint32()); + break; + } + case 3: { + message.isSplittable = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamStatus message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamStatus message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) + return "estimatedRowCount: integer|Long expected"; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + if (typeof message.fractionConsumed !== "number") + return "fractionConsumed: number expected"; + if (message.progress != null && message.hasOwnProperty("progress")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Progress.verify(message.progress); + if (error) + return "progress." + error; + } + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + if (typeof message.isSplittable !== "boolean") + return "isSplittable: boolean expected"; + return null; + }; + + /** + * Creates a StreamStatus message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.StreamStatus} StreamStatus + */ + StreamStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.StreamStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); + if (object.estimatedRowCount != null) + if ($util.Long) + (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; + else if (typeof object.estimatedRowCount === "string") + message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); + else if (typeof object.estimatedRowCount === "number") + message.estimatedRowCount = object.estimatedRowCount; + else if (typeof object.estimatedRowCount === "object") + message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); + if (object.fractionConsumed != null) + message.fractionConsumed = Number(object.fractionConsumed); + if (object.progress != null) { + if (typeof object.progress !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.StreamStatus.progress: object expected"); + message.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.fromObject(object.progress); + } + if (object.isSplittable != null) + message.isSplittable = Boolean(object.isSplittable); + return message; + }; + + /** + * Creates a plain object from a StreamStatus message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.StreamStatus} message StreamStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedRowCount = options.longs === String ? "0" : 0; + object.fractionConsumed = 0; + object.isSplittable = false; + object.progress = null; + } + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (typeof message.estimatedRowCount === "number") + object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; + else + object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; + if (message.fractionConsumed != null && message.hasOwnProperty("fractionConsumed")) + object.fractionConsumed = options.json && !isFinite(message.fractionConsumed) ? String(message.fractionConsumed) : message.fractionConsumed; + if (message.isSplittable != null && message.hasOwnProperty("isSplittable")) + object.isSplittable = message.isSplittable; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = $root.google.cloud.bigquery.storage.v1beta1.Progress.toObject(message.progress, options); + return object; + }; + + /** + * Converts this StreamStatus to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @instance + * @returns {Object.} JSON object + */ + StreamStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamStatus + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.StreamStatus + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.StreamStatus"; + }; + + return StreamStatus; + })(); + + v1beta1.Progress = (function() { + + /** + * Properties of a Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IProgress + * @property {number|null} [atResponseStart] Progress atResponseStart + * @property {number|null} [atResponseEnd] Progress atResponseEnd + */ + + /** + * Constructs a new Progress. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a Progress. + * @implements IProgress + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + */ + function Progress(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Progress atResponseStart. + * @member {number} atResponseStart + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseStart = 0; + + /** + * Progress atResponseEnd. 
+ * @member {number} atResponseEnd + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + */ + Progress.prototype.atResponseEnd = 0; + + /** + * Creates a new Progress instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress instance + */ + Progress.create = function create(properties) { + return new Progress(properties); + }; + + /** + * Encodes the specified Progress message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.atResponseStart != null && Object.hasOwnProperty.call(message, "atResponseStart")) + writer.uint32(/* id 1, wireType 5 =*/13).float(message.atResponseStart); + if (message.atResponseEnd != null && Object.hasOwnProperty.call(message, "atResponseEnd")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.atResponseEnd); + return writer; + }; + + /** + * Encodes the specified Progress message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.Progress.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IProgress} message Progress message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Progress.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Progress message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.atResponseStart = reader.float(); + break; + } + case 2: { + message.atResponseEnd = reader.float(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Progress message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Progress.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Progress message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Progress.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + if (typeof message.atResponseStart !== "number") + return "atResponseStart: number expected"; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + if (typeof message.atResponseEnd !== "number") + return "atResponseEnd: number expected"; + return null; + }; + + /** + * Creates a Progress message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.Progress} Progress + */ + Progress.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.Progress) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); + if (object.atResponseStart != null) + message.atResponseStart = Number(object.atResponseStart); + if (object.atResponseEnd != null) + message.atResponseEnd = Number(object.atResponseEnd); + return message; + }; + + /** + * Creates a plain object from a Progress message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {google.cloud.bigquery.storage.v1beta1.Progress} message Progress + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Progress.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.atResponseStart = 0; + object.atResponseEnd = 0; + } + if (message.atResponseStart != null && message.hasOwnProperty("atResponseStart")) + object.atResponseStart = options.json && !isFinite(message.atResponseStart) ? String(message.atResponseStart) : message.atResponseStart; + if (message.atResponseEnd != null && message.hasOwnProperty("atResponseEnd")) + object.atResponseEnd = options.json && !isFinite(message.atResponseEnd) ? String(message.atResponseEnd) : message.atResponseEnd; + return object; + }; + + /** + * Converts this Progress to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @instance + * @returns {Object.} JSON object + */ + Progress.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Progress + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.Progress + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Progress.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.Progress"; + }; + + return Progress; + })(); + + v1beta1.ThrottleStatus = (function() { + + /** + * Properties of a ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IThrottleStatus + * @property {number|null} [throttlePercent] ThrottleStatus throttlePercent + */ + + /** + * Constructs a new ThrottleStatus. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ThrottleStatus. + * @implements IThrottleStatus + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + */ + function ThrottleStatus(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ThrottleStatus throttlePercent. + * @member {number} throttlePercent + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + */ + ThrottleStatus.prototype.throttlePercent = 0; + + /** + * Creates a new ThrottleStatus instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus instance + */ + ThrottleStatus.create = function create(properties) { + return new ThrottleStatus(properties); + }; + + /** + * Encodes the specified ThrottleStatus message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.throttlePercent != null && Object.hasOwnProperty.call(message, "throttlePercent")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.throttlePercent); + return writer; + }; + + /** + * Encodes the specified ThrottleStatus message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IThrottleStatus} message ThrottleStatus message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ThrottleStatus.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.throttlePercent = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ThrottleStatus message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ThrottleStatus.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ThrottleStatus message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ThrottleStatus.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + if (!$util.isInteger(message.throttlePercent)) + return "throttlePercent: integer expected"; + return null; + }; + + /** + * Creates a ThrottleStatus message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} ThrottleStatus + */ + ThrottleStatus.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); + if (object.throttlePercent != null) + message.throttlePercent = object.throttlePercent | 0; + return message; + }; + + /** + * Creates a plain object from a ThrottleStatus message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ThrottleStatus} message ThrottleStatus + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ThrottleStatus.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.throttlePercent = 0; + if (message.throttlePercent != null && message.hasOwnProperty("throttlePercent")) + object.throttlePercent = message.throttlePercent; + return object; + }; + + /** + * Converts this ThrottleStatus to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @instance + * @returns {Object.} JSON object + */ + ThrottleStatus.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ThrottleStatus + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ThrottleStatus + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ThrottleStatus.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ThrottleStatus"; + }; + + return ThrottleStatus; + })(); + + v1beta1.ReadRowsResponse = (function() { + + /** + * Properties of a ReadRowsResponse. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IReadRowsResponse + * @property {google.cloud.bigquery.storage.v1beta1.IAvroRows|null} [avroRows] ReadRowsResponse avroRows + * @property {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null} [arrowRecordBatch] ReadRowsResponse arrowRecordBatch + * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount + * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status + * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus + */ + + /** + * Constructs a new ReadRowsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a ReadRowsResponse. + * @implements IReadRowsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + */ + function ReadRowsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadRowsResponse avroRows. + * @member {google.cloud.bigquery.storage.v1beta1.IAvroRows|null|undefined} avroRows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroRows = null; + + /** + * ReadRowsResponse arrowRecordBatch. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch|null|undefined} arrowRecordBatch + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowRecordBatch = null; + + /** + * ReadRowsResponse rowCount. + * @member {number|Long} rowCount + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.rowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * ReadRowsResponse status. 
+ * @member {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null|undefined} status + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.status = null; + + /** + * ReadRowsResponse throttleStatus. + * @member {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null|undefined} throttleStatus + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.throttleStatus = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ReadRowsResponse rows. + * @member {"avroRows"|"arrowRecordBatch"|undefined} rows + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "rows", { + get: $util.oneOfGetter($oneOfFields = ["avroRows", "arrowRecordBatch"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ReadRowsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse instance + */ + ReadRowsResponse.create = function create(properties) { + return new ReadRowsResponse(properties); + }; + + /** + * Encodes the specified ReadRowsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.status != null && Object.hasOwnProperty.call(message, "status")) + $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.encode(message.status, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.avroRows != null && Object.hasOwnProperty.call(message, "avroRows")) + $root.google.cloud.bigquery.storage.v1beta1.AvroRows.encode(message.avroRows, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.arrowRecordBatch != null && Object.hasOwnProperty.call(message, "arrowRecordBatch")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.encode(message.arrowRecordBatch, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.throttleStatus != null && Object.hasOwnProperty.call(message, "throttleStatus")) + $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) + writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + return writer; + }; + + /** + * Encodes the specified ReadRowsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IReadRowsResponse} message ReadRowsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadRowsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); + break; + } + case 4: { + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + } + case 6: { + message.rowCount = reader.int64(); + break; + } + case 2: { + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.decode(reader, reader.uint32()); + break; + } + case 5: { + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadRowsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadRowsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadRowsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadRowsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.verify(message.avroRows); + if (error) + return "avroRows." + error; + } + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.verify(message.arrowRecordBatch); + if (error) + return "arrowRecordBatch." + error; + } + } + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (!$util.isInteger(message.rowCount) && !(message.rowCount && $util.isInteger(message.rowCount.low) && $util.isInteger(message.rowCount.high))) + return "rowCount: integer|Long expected"; + if (message.status != null && message.hasOwnProperty("status")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.verify(message.status); + if (error) + return "status." + error; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.verify(message.throttleStatus); + if (error) + return "throttleStatus." + error; + } + return null; + }; + + /** + * Creates a ReadRowsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} ReadRowsResponse + */ + ReadRowsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); + if (object.avroRows != null) { + if (typeof object.avroRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroRows: object expected"); + message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.fromObject(object.avroRows); + } + if (object.arrowRecordBatch != null) { + if (typeof object.arrowRecordBatch !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowRecordBatch: object expected"); + message.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.fromObject(object.arrowRecordBatch); + } + if (object.rowCount != null) + if ($util.Long) + (message.rowCount = $util.Long.fromValue(object.rowCount)).unsigned = false; + else if (typeof object.rowCount === "string") + message.rowCount = parseInt(object.rowCount, 10); + else if (typeof object.rowCount === "number") + message.rowCount = object.rowCount; + else if (typeof object.rowCount === "object") + message.rowCount = new $util.LongBits(object.rowCount.low >>> 0, object.rowCount.high >>> 0).toNumber(); + if (object.status != null) { + if (typeof object.status !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.status: object expected"); + message.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.fromObject(object.status); + } + if (object.throttleStatus != null) { + if (typeof object.throttleStatus !== "object") + throw 
TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); + message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); + } + return message; + }; + + /** + * Creates a plain object from a ReadRowsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} message ReadRowsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadRowsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.status = null; + object.throttleStatus = null; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.rowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.rowCount = options.longs === String ? 
"0" : 0; + } + if (message.status != null && message.hasOwnProperty("status")) + object.status = $root.google.cloud.bigquery.storage.v1beta1.StreamStatus.toObject(message.status, options); + if (message.avroRows != null && message.hasOwnProperty("avroRows")) { + object.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.toObject(message.avroRows, options); + if (options.oneofs) + object.rows = "avroRows"; + } + if (message.arrowRecordBatch != null && message.hasOwnProperty("arrowRecordBatch")) { + object.arrowRecordBatch = $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch.toObject(message.arrowRecordBatch, options); + if (options.oneofs) + object.rows = "arrowRecordBatch"; + } + if (message.throttleStatus != null && message.hasOwnProperty("throttleStatus")) + object.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.toObject(message.throttleStatus, options); + if (message.rowCount != null && message.hasOwnProperty("rowCount")) + if (typeof message.rowCount === "number") + object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; + else + object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + return object; + }; + + /** + * Converts this ReadRowsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + * @returns {Object.} JSON object + */ + ReadRowsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadRowsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadRowsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"; + }; + + return ReadRowsResponse; + })(); + + v1beta1.BatchCreateReadSessionStreamsRequest = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsRequest + * @property {google.cloud.bigquery.storage.v1beta1.IReadSession|null} [session] BatchCreateReadSessionStreamsRequest session + * @property {number|null} [requestedStreams] BatchCreateReadSessionStreamsRequest requestedStreams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsRequest. 
+ * @implements IBatchCreateReadSessionStreamsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsRequest session. + * @member {google.cloud.bigquery.storage.v1beta1.IReadSession|null|undefined} session + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.session = null; + + /** + * BatchCreateReadSessionStreamsRequest requestedStreams. + * @member {number} requestedStreams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + */ + BatchCreateReadSessionStreamsRequest.prototype.requestedStreams = 0; + + /** + * Creates a new BatchCreateReadSessionStreamsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest instance + */ + BatchCreateReadSessionStreamsRequest.create = function create(properties) { + return new BatchCreateReadSessionStreamsRequest(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.session != null && Object.hasOwnProperty.call(message, "session")) + $root.google.cloud.bigquery.storage.v1beta1.ReadSession.encode(message.session, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.requestedStreams != null && Object.hasOwnProperty.call(message, "requestedStreams")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.requestedStreams); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); + break; + } + case 2: { + message.requestedStreams = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.session != null && message.hasOwnProperty("session")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.verify(message.session); + if (error) + return "session." + error; + } + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + if (!$util.isInteger(message.requestedStreams)) + return "requestedStreams: integer expected"; + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} BatchCreateReadSessionStreamsRequest + */ + BatchCreateReadSessionStreamsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); + if (object.session != null) { + if (typeof object.session !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest.session: object expected"); + message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.fromObject(object.session); + } + if (object.requestedStreams != null) + message.requestedStreams = object.requestedStreams | 0; + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest} message BatchCreateReadSessionStreamsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.session = null; + object.requestedStreams = 0; + } + if (message.session != null && message.hasOwnProperty("session")) + object.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.toObject(message.session, options); + if (message.requestedStreams != null && message.hasOwnProperty("requestedStreams")) + object.requestedStreams = message.requestedStreams; + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateReadSessionStreamsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest"; + }; + + return BatchCreateReadSessionStreamsRequest; + })(); + + v1beta1.BatchCreateReadSessionStreamsResponse = (function() { + + /** + * Properties of a BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IBatchCreateReadSessionStreamsResponse + * @property {Array.|null} [streams] BatchCreateReadSessionStreamsResponse streams + */ + + /** + * Constructs a new BatchCreateReadSessionStreamsResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a BatchCreateReadSessionStreamsResponse. 
+ * @implements IBatchCreateReadSessionStreamsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + */ + function BatchCreateReadSessionStreamsResponse(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateReadSessionStreamsResponse streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + */ + BatchCreateReadSessionStreamsResponse.prototype.streams = $util.emptyArray; + + /** + * Creates a new BatchCreateReadSessionStreamsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse instance + */ + BatchCreateReadSessionStreamsResponse.create = function create(properties) { + return new BatchCreateReadSessionStreamsResponse(properties); + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCreateReadSessionStreamsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateReadSessionStreamsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateReadSessionStreamsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateReadSessionStreamsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateReadSessionStreamsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateReadSessionStreamsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a BatchCreateReadSessionStreamsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} BatchCreateReadSessionStreamsResponse + */ + BatchCreateReadSessionStreamsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCreateReadSessionStreamsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse} message BatchCreateReadSessionStreamsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateReadSessionStreamsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this BatchCreateReadSessionStreamsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCreateReadSessionStreamsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateReadSessionStreamsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateReadSessionStreamsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"; + }; + + return BatchCreateReadSessionStreamsResponse; + })(); + + v1beta1.FinalizeStreamRequest = (function() { + + /** + * Properties of a 
FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface IFinalizeStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [stream] FinalizeStreamRequest stream + */ + + /** + * Constructs a new FinalizeStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a FinalizeStreamRequest. + * @implements IFinalizeStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + */ + function FinalizeStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FinalizeStreamRequest stream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} stream + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + */ + FinalizeStreamRequest.prototype.stream = null; + + /** + * Creates a new FinalizeStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest instance + */ + FinalizeStreamRequest.create = function create(properties) { + return new FinalizeStreamRequest(properties); + }; + + /** + * Encodes the specified FinalizeStreamRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.stream != null && Object.hasOwnProperty.call(message, "stream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.stream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FinalizeStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest} message FinalizeStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FinalizeStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FinalizeStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FinalizeStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FinalizeStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FinalizeStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.stream != null && message.hasOwnProperty("stream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.stream); + if (error) + return "stream." + error; + } + return null; + }; + + /** + * Creates a FinalizeStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} FinalizeStreamRequest + */ + FinalizeStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); + if (object.stream != null) { + if (typeof object.stream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest.stream: object expected"); + message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.stream); + } + return message; + }; + + /** + * Creates a plain object from a FinalizeStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest} message FinalizeStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FinalizeStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.stream = null; + if (message.stream != null && message.hasOwnProperty("stream")) + object.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.stream, options); + return object; + }; + + /** + * Converts this FinalizeStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @instance + * @returns {Object.} JSON object + */ + FinalizeStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FinalizeStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FinalizeStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest"; + }; + + return FinalizeStreamRequest; + })(); + + v1beta1.SplitReadStreamRequest = (function() { + + /** + * Properties of a SplitReadStreamRequest. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamRequest + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [originalStream] SplitReadStreamRequest originalStream + * @property {number|null} [fraction] SplitReadStreamRequest fraction + */ + + /** + * Constructs a new SplitReadStreamRequest. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamRequest. + * @implements ISplitReadStreamRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + */ + function SplitReadStreamRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamRequest originalStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} originalStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.originalStream = null; + + /** + * SplitReadStreamRequest fraction. + * @member {number} fraction + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + */ + SplitReadStreamRequest.prototype.fraction = 0; + + /** + * Creates a new SplitReadStreamRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest instance + */ + SplitReadStreamRequest.create = function create(properties) { + return new SplitReadStreamRequest(properties); + }; + + /** + * Encodes the specified SplitReadStreamRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.originalStream != null && Object.hasOwnProperty.call(message, "originalStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.originalStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fraction != null && Object.hasOwnProperty.call(message, "fraction")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.fraction); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest} message SplitReadStreamRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.fraction = reader.float(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.originalStream != null && message.hasOwnProperty("originalStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.originalStream); + if (error) + return "originalStream." + error; + } + if (message.fraction != null && message.hasOwnProperty("fraction")) + if (typeof message.fraction !== "number") + return "fraction: number expected"; + return null; + }; + + /** + * Creates a SplitReadStreamRequest message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} SplitReadStreamRequest + */ + SplitReadStreamRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); + if (object.originalStream != null) { + if (typeof object.originalStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest.originalStream: object expected"); + message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.originalStream); + } + if (object.fraction != null) + message.fraction = Number(object.fraction); + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest} message SplitReadStreamRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.originalStream = null; + object.fraction = 0; + } + if (message.originalStream != null && message.hasOwnProperty("originalStream")) + object.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.originalStream, options); + if (message.fraction != null && message.hasOwnProperty("fraction")) + object.fraction = options.json && !isFinite(message.fraction) ? String(message.fraction) : message.fraction; + return object; + }; + + /** + * Converts this SplitReadStreamRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SplitReadStreamRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest"; + }; + + return SplitReadStreamRequest; + })(); + + v1beta1.SplitReadStreamResponse = (function() { + + /** + * Properties of a 
SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ISplitReadStreamResponse + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [primaryStream] SplitReadStreamResponse primaryStream + * @property {google.cloud.bigquery.storage.v1beta1.IStream|null} [remainderStream] SplitReadStreamResponse remainderStream + */ + + /** + * Constructs a new SplitReadStreamResponse. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a SplitReadStreamResponse. + * @implements ISplitReadStreamResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + */ + function SplitReadStreamResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SplitReadStreamResponse primaryStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} primaryStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.primaryStream = null; + + /** + * SplitReadStreamResponse remainderStream. + * @member {google.cloud.bigquery.storage.v1beta1.IStream|null|undefined} remainderStream + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + */ + SplitReadStreamResponse.prototype.remainderStream = null; + + /** + * Creates a new SplitReadStreamResponse instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse instance + */ + SplitReadStreamResponse.create = function create(properties) { + return new SplitReadStreamResponse(properties); + }; + + /** + * Encodes the specified SplitReadStreamResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.primaryStream != null && Object.hasOwnProperty.call(message, "primaryStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.primaryStream, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.remainderStream != null && Object.hasOwnProperty.call(message, "remainderStream")) + $root.google.cloud.bigquery.storage.v1beta1.Stream.encode(message.remainderStream, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SplitReadStreamResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse} message SplitReadStreamResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SplitReadStreamResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + case 2: { + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SplitReadStreamResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SplitReadStreamResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SplitReadStreamResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SplitReadStreamResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.primaryStream); + if (error) + return "primaryStream." + error; + } + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) { + var error = $root.google.cloud.bigquery.storage.v1beta1.Stream.verify(message.remainderStream); + if (error) + return "remainderStream." + error; + } + return null; + }; + + /** + * Creates a SplitReadStreamResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} SplitReadStreamResponse + */ + SplitReadStreamResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); + if (object.primaryStream != null) { + if (typeof object.primaryStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.primaryStream: object expected"); + message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.primaryStream); + } + if (object.remainderStream != null) { + if (typeof object.remainderStream !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse.remainderStream: object expected"); + message.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.fromObject(object.remainderStream); + } + return message; + }; + + /** + * Creates a plain object from a SplitReadStreamResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse} message SplitReadStreamResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SplitReadStreamResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.primaryStream = null; + object.remainderStream = null; + } + if (message.primaryStream != null && message.hasOwnProperty("primaryStream")) + object.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.primaryStream, options); + if (message.remainderStream != null && message.hasOwnProperty("remainderStream")) + object.remainderStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.toObject(message.remainderStream, options); + return object; + }; + + /** + * Converts this SplitReadStreamResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @instance + * @returns {Object.} JSON object + */ + SplitReadStreamResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SplitReadStreamResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SplitReadStreamResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"; + }; + + return SplitReadStreamResponse; + })(); + + v1beta1.TableReference = (function() { + + /** + * Properties of a TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableReference + * @property {string|null} [projectId] TableReference projectId + * @property {string|null} [datasetId] TableReference datasetId + * @property {string|null} [tableId] TableReference tableId + */ + + /** + * Constructs a new TableReference. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableReference. + * @implements ITableReference + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + */ + function TableReference(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableReference projectId. 
+ * @member {string} projectId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.projectId = ""; + + /** + * TableReference datasetId. + * @member {string} datasetId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.datasetId = ""; + + /** + * TableReference tableId. + * @member {string} tableId + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + */ + TableReference.prototype.tableId = ""; + + /** + * Creates a new TableReference instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference instance + */ + TableReference.create = function create(properties) { + return new TableReference(properties); + }; + + /** + * Encodes the specified TableReference message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.projectId != null && Object.hasOwnProperty.call(message, "projectId")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.projectId); + if (message.datasetId != null && Object.hasOwnProperty.call(message, "datasetId")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.datasetId); + if (message.tableId != null && Object.hasOwnProperty.call(message, "tableId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.tableId); + return writer; + }; + + /** + * Encodes the specified TableReference message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableReference.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableReference} message TableReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableReference.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableReference message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.projectId = reader.string(); + break; + } + case 2: { + message.datasetId = reader.string(); + break; + } + case 3: { + message.tableId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableReference message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableReference.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableReference message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableReference.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.projectId != null && message.hasOwnProperty("projectId")) + if (!$util.isString(message.projectId)) + return "projectId: string expected"; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + if (!$util.isString(message.datasetId)) + return "datasetId: string expected"; + if (message.tableId != null && message.hasOwnProperty("tableId")) + if (!$util.isString(message.tableId)) + return "tableId: string expected"; + return null; + }; + + /** + * Creates a TableReference message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableReference} TableReference + */ + TableReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableReference) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); + if (object.projectId != null) + message.projectId = String(object.projectId); + if (object.datasetId != null) + message.datasetId = String(object.datasetId); + if (object.tableId != null) + message.tableId = String(object.tableId); + return message; + }; + + /** + * Creates a plain object from a TableReference message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableReference} message TableReference + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableReference.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.projectId = ""; + object.datasetId = ""; + object.tableId = ""; + } + if (message.projectId != null && message.hasOwnProperty("projectId")) + object.projectId = message.projectId; + if (message.datasetId != null && message.hasOwnProperty("datasetId")) + object.datasetId = message.datasetId; + if (message.tableId != null && message.hasOwnProperty("tableId")) + object.tableId = message.tableId; + return object; + }; + + /** + * Converts this TableReference to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @instance + * @returns {Object.} JSON object + */ + TableReference.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableReference + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableReference + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableReference"; + }; + + return TableReference; + })(); + + v1beta1.TableModifiers = (function() { + + /** + * Properties of a TableModifiers. 
+ * @memberof google.cloud.bigquery.storage.v1beta1 + * @interface ITableModifiers + * @property {google.protobuf.ITimestamp|null} [snapshotTime] TableModifiers snapshotTime + */ + + /** + * Constructs a new TableModifiers. + * @memberof google.cloud.bigquery.storage.v1beta1 + * @classdesc Represents a TableModifiers. + * @implements ITableModifiers + * @constructor + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + */ + function TableModifiers(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TableModifiers snapshotTime. + * @member {google.protobuf.ITimestamp|null|undefined} snapshotTime + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + */ + TableModifiers.prototype.snapshotTime = null; + + /** + * Creates a new TableModifiers instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers instance + */ + TableModifiers.create = function create(properties) { + return new TableModifiers(properties); + }; + + /** + * Encodes the specified TableModifiers message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.snapshotTime != null && Object.hasOwnProperty.call(message, "snapshotTime")) + $root.google.protobuf.Timestamp.encode(message.snapshotTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified TableModifiers message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta1.TableModifiers.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.ITableModifiers} message TableModifiers message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TableModifiers.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a TableModifiers message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TableModifiers.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a TableModifiers message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TableModifiers.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTime); + if (error) + return "snapshotTime." + error; + } + return null; + }; + + /** + * Creates a TableModifiers message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta1.TableModifiers} TableModifiers + */ + TableModifiers.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta1.TableModifiers) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); + if (object.snapshotTime != null) { + if (typeof object.snapshotTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.TableModifiers.snapshotTime: object expected"); + message.snapshotTime = $root.google.protobuf.Timestamp.fromObject(object.snapshotTime); + } + return message; + }; + + /** + * Creates a plain object from a TableModifiers message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} message TableModifiers + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + TableModifiers.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.snapshotTime = null; + if (message.snapshotTime != null && message.hasOwnProperty("snapshotTime")) + object.snapshotTime = $root.google.protobuf.Timestamp.toObject(message.snapshotTime, options); + return object; + }; + + /** + * Converts this TableModifiers to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @instance + * @returns {Object.} JSON object + */ + TableModifiers.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for TableModifiers + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta1.TableModifiers + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + TableModifiers.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta1.TableModifiers"; + }; + + return TableModifiers; + })(); + + return v1beta1; + })(); + + return storage; + })(); + + return bigquery; + })(); + + return cloud; + })(); + + google.protobuf = (function() { + + /** + * Namespace protobuf. + * @memberof google + * @namespace + */ + var protobuf = {}; + + protobuf.FileDescriptorSet = (function() { + + /** + * Properties of a FileDescriptorSet. 
+ * @memberof google.protobuf + * @interface IFileDescriptorSet + * @property {Array.|null} [file] FileDescriptorSet file + */ + + /** + * Constructs a new FileDescriptorSet. + * @memberof google.protobuf + * @classdesc Represents a FileDescriptorSet. + * @implements IFileDescriptorSet + * @constructor + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + */ + function FileDescriptorSet(properties) { + this.file = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorSet file. + * @member {Array.} file + * @memberof google.protobuf.FileDescriptorSet + * @instance + */ + FileDescriptorSet.prototype.file = $util.emptyArray; + + /** + * Creates a new FileDescriptorSet instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet instance + */ + FileDescriptorSet.create = function create(properties) { + return new FileDescriptorSet(properties); + }; + + /** + * Encodes the specified FileDescriptorSet message. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.file != null && message.file.length) + for (var i = 0; i < message.file.length; ++i) + $root.google.protobuf.FileDescriptorProto.encode(message.file[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FileDescriptorSet message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorSet.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.IFileDescriptorSet} message FileDescriptorSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorSet.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.file && message.file.length)) + message.file = []; + message.file.push($root.google.protobuf.FileDescriptorProto.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorSet message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorSet.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorSet message. + * @function verify + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorSet.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.file != null && message.hasOwnProperty("file")) { + if (!Array.isArray(message.file)) + return "file: array expected"; + for (var i = 0; i < message.file.length; ++i) { + var error = $root.google.protobuf.FileDescriptorProto.verify(message.file[i]); + if (error) + return "file." + error; + } + } + return null; + }; + + /** + * Creates a FileDescriptorSet message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorSet} FileDescriptorSet + */ + FileDescriptorSet.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorSet) + return object; + var message = new $root.google.protobuf.FileDescriptorSet(); + if (object.file) { + if (!Array.isArray(object.file)) + throw TypeError(".google.protobuf.FileDescriptorSet.file: array expected"); + message.file = []; + for (var i = 0; i < object.file.length; ++i) { + if (typeof object.file[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorSet.file: object expected"); + message.file[i] = $root.google.protobuf.FileDescriptorProto.fromObject(object.file[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a FileDescriptorSet message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {google.protobuf.FileDescriptorSet} message FileDescriptorSet + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorSet.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.file = []; + if (message.file && message.file.length) { + object.file = []; + for (var j = 0; j < message.file.length; ++j) + object.file[j] = $root.google.protobuf.FileDescriptorProto.toObject(message.file[j], options); + } + return object; + }; + + /** + * Converts this FileDescriptorSet to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FileDescriptorSet + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorSet.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FileDescriptorSet + * @function getTypeUrl + * @memberof google.protobuf.FileDescriptorSet + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FileDescriptorSet.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileDescriptorSet"; + }; + + return FileDescriptorSet; + })(); + + protobuf.FileDescriptorProto = (function() { + + /** + * Properties of a FileDescriptorProto. + * @memberof google.protobuf + * @interface IFileDescriptorProto + * @property {string|null} [name] FileDescriptorProto name + * @property {string|null} ["package"] FileDescriptorProto package + * @property {Array.|null} [dependency] FileDescriptorProto dependency + * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency + * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [messageType] FileDescriptorProto messageType + * @property {Array.|null} [enumType] FileDescriptorProto enumType + * @property {Array.|null} [service] FileDescriptorProto service + * @property {Array.|null} [extension] FileDescriptorProto extension + * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options + * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo + * @property {string|null} [syntax] FileDescriptorProto syntax + * @property {string|null} [edition] FileDescriptorProto edition + */ + + /** + * Constructs a new FileDescriptorProto. 
+ * @memberof google.protobuf + * @classdesc Represents a FileDescriptorProto. + * @implements IFileDescriptorProto + * @constructor + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + */ + function FileDescriptorProto(properties) { + this.dependency = []; + this.publicDependency = []; + this.weakDependency = []; + this.messageType = []; + this.enumType = []; + this.service = []; + this.extension = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.name = ""; + + /** + * FileDescriptorProto package. + * @member {string} package + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype["package"] = ""; + + /** + * FileDescriptorProto dependency. + * @member {Array.} dependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.dependency = $util.emptyArray; + + /** + * FileDescriptorProto publicDependency. + * @member {Array.} publicDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.publicDependency = $util.emptyArray; + + /** + * FileDescriptorProto weakDependency. + * @member {Array.} weakDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + + /** + * FileDescriptorProto messageType. + * @member {Array.} messageType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.messageType = $util.emptyArray; + + /** + * FileDescriptorProto enumType. 
+ * @member {Array.} enumType + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * FileDescriptorProto service. + * @member {Array.} service + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.service = $util.emptyArray; + + /** + * FileDescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.extension = $util.emptyArray; + + /** + * FileDescriptorProto options. + * @member {google.protobuf.IFileOptions|null|undefined} options + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.options = null; + + /** + * FileDescriptorProto sourceCodeInfo. + * @member {google.protobuf.ISourceCodeInfo|null|undefined} sourceCodeInfo + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.sourceCodeInfo = null; + + /** + * FileDescriptorProto syntax. + * @member {string} syntax + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.syntax = ""; + + /** + * FileDescriptorProto edition. + * @member {string} edition + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.edition = ""; + + /** + * Creates a new FileDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto instance + */ + FileDescriptorProto.create = function create(properties) { + return new FileDescriptorProto(properties); + }; + + /** + * Encodes the specified FileDescriptorProto message. 
Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); + if (message.dependency != null && message.dependency.length) + for (var i = 0; i < message.dependency.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.dependency[i]); + if (message.messageType != null && message.messageType.length) + for (var i = 0; i < message.messageType.length; ++i) + $root.google.protobuf.DescriptorProto.encode(message.messageType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.service != null && message.service.length) + for (var i = 0; i < message.service.length; ++i) + $root.google.protobuf.ServiceDescriptorProto.encode(message.service[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) + $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.publicDependency != null && message.publicDependency.length) + for (var i = 0; i < message.publicDependency.length; ++i) + writer.uint32(/* id 10, wireType 0 =*/80).int32(message.publicDependency[i]); + if (message.weakDependency != null && message.weakDependency.length) + for (var i = 0; i < message.weakDependency.length; ++i) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); + if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); + if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) + writer.uint32(/* id 13, wireType 2 =*/106).string(message.edition); + return writer; + }; + + /** + * Encodes the specified FileDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FileDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.IFileDescriptorProto} message FileDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message["package"] = reader.string(); + break; + } + case 3: { + if (!(message.dependency && message.dependency.length)) + message.dependency = []; + message.dependency.push(reader.string()); + break; + } + case 10: { + if (!(message.publicDependency && message.publicDependency.length)) + message.publicDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.publicDependency.push(reader.int32()); + } else + message.publicDependency.push(reader.int32()); + break; + } + case 11: { + if (!(message.weakDependency && message.weakDependency.length)) + message.weakDependency = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.weakDependency.push(reader.int32()); + } else + message.weakDependency.push(reader.int32()); + break; + } + case 4: { + if (!(message.messageType && message.messageType.length)) + message.messageType = []; + message.messageType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.enumType && message.enumType.length)) 
+ message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 6: { + if (!(message.service && message.service.length)) + message.service = []; + message.service.push($root.google.protobuf.ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 7: { + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 8: { + message.options = $root.google.protobuf.FileOptions.decode(reader, reader.uint32()); + break; + } + case 9: { + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.decode(reader, reader.uint32()); + break; + } + case 12: { + message.syntax = reader.string(); + break; + } + case 13: { + message.edition = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message["package"] != null && message.hasOwnProperty("package")) + if (!$util.isString(message["package"])) + return "package: string expected"; + if (message.dependency != null && message.hasOwnProperty("dependency")) { + if (!Array.isArray(message.dependency)) + return "dependency: array expected"; + for (var i = 0; i < message.dependency.length; ++i) + if (!$util.isString(message.dependency[i])) + return "dependency: string[] expected"; + } + if (message.publicDependency != null && message.hasOwnProperty("publicDependency")) { + if (!Array.isArray(message.publicDependency)) + return "publicDependency: array expected"; + for (var i = 0; i < message.publicDependency.length; ++i) + if (!$util.isInteger(message.publicDependency[i])) + return "publicDependency: integer[] expected"; + } + if (message.weakDependency != null && message.hasOwnProperty("weakDependency")) { + if (!Array.isArray(message.weakDependency)) + return "weakDependency: array expected"; + for (var i = 0; i < message.weakDependency.length; ++i) + if (!$util.isInteger(message.weakDependency[i])) + return "weakDependency: integer[] expected"; + } + if (message.messageType != null && message.hasOwnProperty("messageType")) { + if (!Array.isArray(message.messageType)) + return "messageType: array expected"; + for (var i = 0; i < message.messageType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.messageType[i]); + if (error) + return "messageType." 
+ error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." + error; + } + } + if (message.service != null && message.hasOwnProperty("service")) { + if (!Array.isArray(message.service)) + return "service: array expected"; + for (var i = 0; i < message.service.length; ++i) { + var error = $root.google.protobuf.ServiceDescriptorProto.verify(message.service[i]); + if (error) + return "service." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.FileOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) { + var error = $root.google.protobuf.SourceCodeInfo.verify(message.sourceCodeInfo); + if (error) + return "sourceCodeInfo." + error; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + if (!$util.isString(message.syntax)) + return "syntax: string expected"; + if (message.edition != null && message.hasOwnProperty("edition")) + if (!$util.isString(message.edition)) + return "edition: string expected"; + return null; + }; + + /** + * Creates a FileDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileDescriptorProto} FileDescriptorProto + */ + FileDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileDescriptorProto) + return object; + var message = new $root.google.protobuf.FileDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object["package"] != null) + message["package"] = String(object["package"]); + if (object.dependency) { + if (!Array.isArray(object.dependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.dependency: array expected"); + message.dependency = []; + for (var i = 0; i < object.dependency.length; ++i) + message.dependency[i] = String(object.dependency[i]); + } + if (object.publicDependency) { + if (!Array.isArray(object.publicDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.publicDependency: array expected"); + message.publicDependency = []; + for (var i = 0; i < object.publicDependency.length; ++i) + message.publicDependency[i] = object.publicDependency[i] | 0; + } + if (object.weakDependency) { + if (!Array.isArray(object.weakDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.weakDependency: array expected"); + message.weakDependency = []; + for (var i = 0; i < object.weakDependency.length; ++i) + message.weakDependency[i] = object.weakDependency[i] | 0; + } + if (object.messageType) { + if (!Array.isArray(object.messageType)) + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); + message.messageType = []; + for (var i = 0; i < object.messageType.length; ++i) { + if (typeof object.messageType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.messageType: object expected"); + message.messageType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.messageType[i]); + } + } + 
if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: array expected"); + message.enumType = []; + for (var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.service) { + if (!Array.isArray(object.service)) + throw TypeError(".google.protobuf.FileDescriptorProto.service: array expected"); + message.service = []; + for (var i = 0; i < object.service.length; ++i) { + if (typeof object.service[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.service: object expected"); + message.service[i] = $root.google.protobuf.ServiceDescriptorProto.fromObject(object.service[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.FileDescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof object.extension[i] !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.extension: object expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FileOptions.fromObject(object.options); + } + if (object.sourceCodeInfo != null) { + if (typeof object.sourceCodeInfo !== "object") + throw TypeError(".google.protobuf.FileDescriptorProto.sourceCodeInfo: object expected"); + message.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.fromObject(object.sourceCodeInfo); + } + if (object.syntax != null) + message.syntax = String(object.syntax); + if 
(object.edition != null) + message.edition = String(object.edition); + return message; + }; + + /** + * Creates a plain object from a FileDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {google.protobuf.FileDescriptorProto} message FileDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.dependency = []; + object.messageType = []; + object.enumType = []; + object.service = []; + object.extension = []; + object.publicDependency = []; + object.weakDependency = []; + } + if (options.defaults) { + object.name = ""; + object["package"] = ""; + object.options = null; + object.sourceCodeInfo = null; + object.syntax = ""; + object.edition = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message["package"] != null && message.hasOwnProperty("package")) + object["package"] = message["package"]; + if (message.dependency && message.dependency.length) { + object.dependency = []; + for (var j = 0; j < message.dependency.length; ++j) + object.dependency[j] = message.dependency[j]; + } + if (message.messageType && message.messageType.length) { + object.messageType = []; + for (var j = 0; j < message.messageType.length; ++j) + object.messageType[j] = $root.google.protobuf.DescriptorProto.toObject(message.messageType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = $root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.service && message.service.length) { + object.service = []; + for (var j = 0; j 
< message.service.length; ++j) + object.service[j] = $root.google.protobuf.ServiceDescriptorProto.toObject(message.service[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FileOptions.toObject(message.options, options); + if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + object.sourceCodeInfo = $root.google.protobuf.SourceCodeInfo.toObject(message.sourceCodeInfo, options); + if (message.publicDependency && message.publicDependency.length) { + object.publicDependency = []; + for (var j = 0; j < message.publicDependency.length; ++j) + object.publicDependency[j] = message.publicDependency[j]; + } + if (message.weakDependency && message.weakDependency.length) { + object.weakDependency = []; + for (var j = 0; j < message.weakDependency.length; ++j) + object.weakDependency[j] = message.weakDependency[j]; + } + if (message.syntax != null && message.hasOwnProperty("syntax")) + object.syntax = message.syntax; + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = message.edition; + return object; + }; + + /** + * Converts this FileDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FileDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FileDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FileDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.FileDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FileDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileDescriptorProto"; + }; + + return FileDescriptorProto; + })(); + + protobuf.DescriptorProto = (function() { + + /** + * Properties of a DescriptorProto. + * @memberof google.protobuf + * @interface IDescriptorProto + * @property {string|null} [name] DescriptorProto name + * @property {Array.|null} [field] DescriptorProto field + * @property {Array.|null} [extension] DescriptorProto extension + * @property {Array.|null} [nestedType] DescriptorProto nestedType + * @property {Array.|null} [enumType] DescriptorProto enumType + * @property {Array.|null} [extensionRange] DescriptorProto extensionRange + * @property {Array.|null} [oneofDecl] DescriptorProto oneofDecl + * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options + * @property {Array.|null} [reservedRange] DescriptorProto reservedRange + * @property {Array.|null} [reservedName] DescriptorProto reservedName + */ + + /** + * Constructs a new DescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a DescriptorProto. 
+ * @implements IDescriptorProto + * @constructor + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + */ + function DescriptorProto(properties) { + this.field = []; + this.extension = []; + this.nestedType = []; + this.enumType = []; + this.extensionRange = []; + this.oneofDecl = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DescriptorProto name. + * @member {string} name + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.name = ""; + + /** + * DescriptorProto field. + * @member {Array.} field + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.field = $util.emptyArray; + + /** + * DescriptorProto extension. + * @member {Array.} extension + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extension = $util.emptyArray; + + /** + * DescriptorProto nestedType. + * @member {Array.} nestedType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.nestedType = $util.emptyArray; + + /** + * DescriptorProto enumType. + * @member {Array.} enumType + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.enumType = $util.emptyArray; + + /** + * DescriptorProto extensionRange. + * @member {Array.} extensionRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.extensionRange = $util.emptyArray; + + /** + * DescriptorProto oneofDecl. + * @member {Array.} oneofDecl + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.oneofDecl = $util.emptyArray; + + /** + * DescriptorProto options. 
+ * @member {google.protobuf.IMessageOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.options = null; + + /** + * DescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedRange = $util.emptyArray; + + /** + * DescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.reservedName = $util.emptyArray; + + /** + * Creates a new DescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto} DescriptorProto instance + */ + DescriptorProto.create = function create(properties) { + return new DescriptorProto(properties); + }; + + /** + * Encodes the specified DescriptorProto message. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.field != null && message.field.length) + for (var i = 0; i < message.field.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.field[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.nestedType != null && message.nestedType.length) + for (var i = 0; i < message.nestedType.length; ++i) + $root.google.protobuf.DescriptorProto.encode(message.nestedType[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.enumType != null && message.enumType.length) + for (var i = 0; i < message.enumType.length; ++i) + $root.google.protobuf.EnumDescriptorProto.encode(message.enumType[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.extensionRange != null && message.extensionRange.length) + for (var i = 0; i < message.extensionRange.length; ++i) + $root.google.protobuf.DescriptorProto.ExtensionRange.encode(message.extensionRange[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.extension != null && message.extension.length) + for (var i = 0; i < message.extension.length; ++i) + $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if 
(message.oneofDecl != null && message.oneofDecl.length) + for (var i = 0; i < message.oneofDecl.length; ++i) + $root.google.protobuf.OneofDescriptorProto.encode(message.oneofDecl[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.DescriptorProto.ReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + return writer; + }; + + /** + * Encodes the specified DescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.IDescriptorProto} message DescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.field && message.field.length)) + message.field = []; + message.field.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 6: { + if (!(message.extension && message.extension.length)) + message.extension = []; + message.extension.push($root.google.protobuf.FieldDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + if (!(message.nestedType && message.nestedType.length)) + message.nestedType = []; + message.nestedType.push($root.google.protobuf.DescriptorProto.decode(reader, reader.uint32())); + break; + } + case 4: { + if (!(message.enumType && message.enumType.length)) + message.enumType = []; + message.enumType.push($root.google.protobuf.EnumDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.extensionRange && message.extensionRange.length)) + message.extensionRange = []; + message.extensionRange.push($root.google.protobuf.DescriptorProto.ExtensionRange.decode(reader, reader.uint32())); + break; + } + case 8: { + if (!(message.oneofDecl && message.oneofDecl.length)) + message.oneofDecl = []; + 
message.oneofDecl.push($root.google.protobuf.OneofDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 7: { + message.options = $root.google.protobuf.MessageOptions.decode(reader, reader.uint32()); + break; + } + case 9: { + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.DescriptorProto.ReservedRange.decode(reader, reader.uint32())); + break; + } + case 10: { + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto} DescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.field != null && message.hasOwnProperty("field")) { + if (!Array.isArray(message.field)) + return "field: array expected"; + for (var i = 0; i < message.field.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.field[i]); + if (error) + return "field." + error; + } + } + if (message.extension != null && message.hasOwnProperty("extension")) { + if (!Array.isArray(message.extension)) + return "extension: array expected"; + for (var i = 0; i < message.extension.length; ++i) { + var error = $root.google.protobuf.FieldDescriptorProto.verify(message.extension[i]); + if (error) + return "extension." + error; + } + } + if (message.nestedType != null && message.hasOwnProperty("nestedType")) { + if (!Array.isArray(message.nestedType)) + return "nestedType: array expected"; + for (var i = 0; i < message.nestedType.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.verify(message.nestedType[i]); + if (error) + return "nestedType." + error; + } + } + if (message.enumType != null && message.hasOwnProperty("enumType")) { + if (!Array.isArray(message.enumType)) + return "enumType: array expected"; + for (var i = 0; i < message.enumType.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.verify(message.enumType[i]); + if (error) + return "enumType." 
+ error; + } + } + if (message.extensionRange != null && message.hasOwnProperty("extensionRange")) { + if (!Array.isArray(message.extensionRange)) + return "extensionRange: array expected"; + for (var i = 0; i < message.extensionRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ExtensionRange.verify(message.extensionRange[i]); + if (error) + return "extensionRange." + error; + } + } + if (message.oneofDecl != null && message.hasOwnProperty("oneofDecl")) { + if (!Array.isArray(message.oneofDecl)) + return "oneofDecl: array expected"; + for (var i = 0; i < message.oneofDecl.length; ++i) { + var error = $root.google.protobuf.OneofDescriptorProto.verify(message.oneofDecl[i]); + if (error) + return "oneofDecl." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MessageOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.DescriptorProto.ReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates a DescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto} DescriptorProto + */ + DescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto) + return object; + var message = new $root.google.protobuf.DescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.field) { + if (!Array.isArray(object.field)) + throw TypeError(".google.protobuf.DescriptorProto.field: array expected"); + message.field = []; + for (var i = 0; i < object.field.length; ++i) { + if (typeof object.field[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.field: object expected"); + message.field[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.field[i]); + } + } + if (object.extension) { + if (!Array.isArray(object.extension)) + throw TypeError(".google.protobuf.DescriptorProto.extension: array expected"); + message.extension = []; + for (var i = 0; i < object.extension.length; ++i) { + if (typeof object.extension[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extension: object expected"); + message.extension[i] = $root.google.protobuf.FieldDescriptorProto.fromObject(object.extension[i]); + } + } + if (object.nestedType) { + if (!Array.isArray(object.nestedType)) + throw TypeError(".google.protobuf.DescriptorProto.nestedType: array expected"); + message.nestedType = []; + for (var i = 0; i < object.nestedType.length; ++i) { + if (typeof object.nestedType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.nestedType: object expected"); + message.nestedType[i] = $root.google.protobuf.DescriptorProto.fromObject(object.nestedType[i]); + } + } + if (object.enumType) { + if (!Array.isArray(object.enumType)) + throw TypeError(".google.protobuf.DescriptorProto.enumType: array expected"); + message.enumType = []; + for 
(var i = 0; i < object.enumType.length; ++i) { + if (typeof object.enumType[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.enumType: object expected"); + message.enumType[i] = $root.google.protobuf.EnumDescriptorProto.fromObject(object.enumType[i]); + } + } + if (object.extensionRange) { + if (!Array.isArray(object.extensionRange)) + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: array expected"); + message.extensionRange = []; + for (var i = 0; i < object.extensionRange.length; ++i) { + if (typeof object.extensionRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.extensionRange: object expected"); + message.extensionRange[i] = $root.google.protobuf.DescriptorProto.ExtensionRange.fromObject(object.extensionRange[i]); + } + } + if (object.oneofDecl) { + if (!Array.isArray(object.oneofDecl)) + throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: array expected"); + message.oneofDecl = []; + for (var i = 0; i < object.oneofDecl.length; ++i) { + if (typeof object.oneofDecl[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.oneofDecl: object expected"); + message.oneofDecl[i] = $root.google.protobuf.OneofDescriptorProto.fromObject(object.oneofDecl[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MessageOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.DescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = 
$root.google.protobuf.DescriptorProto.ReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.DescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + } + return message; + }; + + /** + * Creates a plain object from a DescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DescriptorProto + * @static + * @param {google.protobuf.DescriptorProto} message DescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.field = []; + object.nestedType = []; + object.enumType = []; + object.extensionRange = []; + object.extension = []; + object.oneofDecl = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.field && message.field.length) { + object.field = []; + for (var j = 0; j < message.field.length; ++j) + object.field[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.field[j], options); + } + if (message.nestedType && message.nestedType.length) { + object.nestedType = []; + for (var j = 0; j < message.nestedType.length; ++j) + object.nestedType[j] = $root.google.protobuf.DescriptorProto.toObject(message.nestedType[j], options); + } + if (message.enumType && message.enumType.length) { + object.enumType = []; + for (var j = 0; j < message.enumType.length; ++j) + object.enumType[j] = 
$root.google.protobuf.EnumDescriptorProto.toObject(message.enumType[j], options); + } + if (message.extensionRange && message.extensionRange.length) { + object.extensionRange = []; + for (var j = 0; j < message.extensionRange.length; ++j) + object.extensionRange[j] = $root.google.protobuf.DescriptorProto.ExtensionRange.toObject(message.extensionRange[j], options); + } + if (message.extension && message.extension.length) { + object.extension = []; + for (var j = 0; j < message.extension.length; ++j) + object.extension[j] = $root.google.protobuf.FieldDescriptorProto.toObject(message.extension[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MessageOptions.toObject(message.options, options); + if (message.oneofDecl && message.oneofDecl.length) { + object.oneofDecl = []; + for (var j = 0; j < message.oneofDecl.length; ++j) + object.oneofDecl[j] = $root.google.protobuf.OneofDescriptorProto.toObject(message.oneofDecl[j], options); + } + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.DescriptorProto.ReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; + }; + + /** + * Converts this DescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.DescriptorProto + * @instance + * @returns {Object.} JSON object + */ + DescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto"; + }; + + DescriptorProto.ExtensionRange = (function() { + + /** + * Properties of an ExtensionRange. + * @memberof google.protobuf.DescriptorProto + * @interface IExtensionRange + * @property {number|null} [start] ExtensionRange start + * @property {number|null} [end] ExtensionRange end + * @property {google.protobuf.IExtensionRangeOptions|null} [options] ExtensionRange options + */ + + /** + * Constructs a new ExtensionRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents an ExtensionRange. + * @implements IExtensionRange + * @constructor + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + */ + function ExtensionRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.start = 0; + + /** + * ExtensionRange end. 
+ * @member {number} end + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.end = 0; + + /** + * ExtensionRange options. + * @member {google.protobuf.IExtensionRangeOptions|null|undefined} options + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + */ + ExtensionRange.prototype.options = null; + + /** + * Creates a new ExtensionRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange instance + */ + ExtensionRange.create = function create(properties) { + return new ExtensionRange(properties); + }; + + /** + * Encodes the specified ExtensionRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @function encode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRange message, length delimited. 
Does not implicitly {@link google.protobuf.DescriptorProto.ExtensionRange.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.IExtensionRange} message ExtensionRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + break; + } + case 3: { + message.options = $root.google.protobuf.ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRange message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRange message. + * @function verify + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ExtensionRangeOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an ExtensionRange message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ExtensionRange} ExtensionRange + */ + ExtensionRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ExtensionRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.DescriptorProto.ExtensionRange.options: object expected"); + message.options = $root.google.protobuf.ExtensionRangeOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an ExtensionRange message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {google.protobuf.DescriptorProto.ExtensionRange} message ExtensionRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + object.options = null; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ExtensionRangeOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this ExtensionRange to JSON. 
+ * @function toJSON + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @instance + * @returns {Object.} JSON object + */ + ExtensionRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ExtensionRange + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto.ExtensionRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExtensionRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto.ExtensionRange"; + }; + + return ExtensionRange; + })(); + + DescriptorProto.ReservedRange = (function() { + + /** + * Properties of a ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @interface IReservedRange + * @property {number|null} [start] ReservedRange start + * @property {number|null} [end] ReservedRange end + */ + + /** + * Constructs a new ReservedRange. + * @memberof google.protobuf.DescriptorProto + * @classdesc Represents a ReservedRange. + * @implements IReservedRange + * @constructor + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + */ + function ReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReservedRange start. + * @member {number} start + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.start = 0; + + /** + * ReservedRange end. 
+ * @member {number} end + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + */ + ReservedRange.prototype.end = 0; + + /** + * Creates a new ReservedRange instance using the specified properties. + * @function create + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange=} [properties] Properties to set + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange instance + */ + ReservedRange.create = function create(properties) { + return new ReservedRange(properties); + }; + + /** + * Encodes the specified ReservedRange message. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. + * @function encode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified ReservedRange message, length delimited. Does not implicitly {@link google.protobuf.DescriptorProto.ReservedRange.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.IReservedRange} message ReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReservedRange message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReservedRange message. + * @function verify + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates a ReservedRange message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DescriptorProto.ReservedRange} ReservedRange + */ + ReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DescriptorProto.ReservedRange) + return object; + var message = new $root.google.protobuf.DescriptorProto.ReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from a ReservedRange message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {google.protobuf.DescriptorProto.ReservedRange} message ReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this ReservedRange to JSON. 
+ * @function toJSON + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @instance + * @returns {Object.} JSON object + */ + ReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReservedRange + * @function getTypeUrl + * @memberof google.protobuf.DescriptorProto.ReservedRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DescriptorProto.ReservedRange"; + }; + + return ReservedRange; + })(); + + return DescriptorProto; + })(); + + protobuf.ExtensionRangeOptions = (function() { + + /** + * Properties of an ExtensionRangeOptions. + * @memberof google.protobuf + * @interface IExtensionRangeOptions + * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption + */ + + /** + * Constructs a new ExtensionRangeOptions. + * @memberof google.protobuf + * @classdesc Represents an ExtensionRangeOptions. + * @implements IExtensionRangeOptions + * @constructor + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + */ + function ExtensionRangeOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExtensionRangeOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new ExtensionRangeOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions=} [properties] Properties to set + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions instance + */ + ExtensionRangeOptions.create = function create(properties) { + return new ExtensionRangeOptions(properties); + }; + + /** + * Encodes the specified ExtensionRangeOptions message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ExtensionRangeOptions message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.IExtensionRangeOptions} message ExtensionRangeOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExtensionRangeOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExtensionRangeOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExtensionRangeOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExtensionRangeOptions message. 
+ * @function verify + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExtensionRangeOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an ExtensionRangeOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ExtensionRangeOptions} ExtensionRangeOptions + */ + ExtensionRangeOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ExtensionRangeOptions) + return object; + var message = new $root.google.protobuf.ExtensionRangeOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.ExtensionRangeOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an 
ExtensionRangeOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {google.protobuf.ExtensionRangeOptions} message ExtensionRangeOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExtensionRangeOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this ExtensionRangeOptions to JSON. + * @function toJSON + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + * @returns {Object.} JSON object + */ + ExtensionRangeOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ExtensionRangeOptions + * @function getTypeUrl + * @memberof google.protobuf.ExtensionRangeOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExtensionRangeOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ExtensionRangeOptions"; + }; + + return ExtensionRangeOptions; + })(); + + protobuf.FieldDescriptorProto = (function() { + + /** + * Properties of a FieldDescriptorProto. 
+ * @memberof google.protobuf + * @interface IFieldDescriptorProto + * @property {string|null} [name] FieldDescriptorProto name + * @property {number|null} [number] FieldDescriptorProto number + * @property {google.protobuf.FieldDescriptorProto.Label|null} [label] FieldDescriptorProto label + * @property {google.protobuf.FieldDescriptorProto.Type|null} [type] FieldDescriptorProto type + * @property {string|null} [typeName] FieldDescriptorProto typeName + * @property {string|null} [extendee] FieldDescriptorProto extendee + * @property {string|null} [defaultValue] FieldDescriptorProto defaultValue + * @property {number|null} [oneofIndex] FieldDescriptorProto oneofIndex + * @property {string|null} [jsonName] FieldDescriptorProto jsonName + * @property {google.protobuf.IFieldOptions|null} [options] FieldDescriptorProto options + * @property {boolean|null} [proto3Optional] FieldDescriptorProto proto3Optional + */ + + /** + * Constructs a new FieldDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a FieldDescriptorProto. + * @implements IFieldDescriptorProto + * @constructor + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + */ + function FieldDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.name = ""; + + /** + * FieldDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.number = 0; + + /** + * FieldDescriptorProto label. 
+ * @member {google.protobuf.FieldDescriptorProto.Label} label + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.label = 1; + + /** + * FieldDescriptorProto type. + * @member {google.protobuf.FieldDescriptorProto.Type} type + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.type = 1; + + /** + * FieldDescriptorProto typeName. + * @member {string} typeName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.typeName = ""; + + /** + * FieldDescriptorProto extendee. + * @member {string} extendee + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.extendee = ""; + + /** + * FieldDescriptorProto defaultValue. + * @member {string} defaultValue + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.defaultValue = ""; + + /** + * FieldDescriptorProto oneofIndex. + * @member {number} oneofIndex + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.oneofIndex = 0; + + /** + * FieldDescriptorProto jsonName. + * @member {string} jsonName + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.jsonName = ""; + + /** + * FieldDescriptorProto options. + * @member {google.protobuf.IFieldOptions|null|undefined} options + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.options = null; + + /** + * FieldDescriptorProto proto3Optional. + * @member {boolean} proto3Optional + * @memberof google.protobuf.FieldDescriptorProto + * @instance + */ + FieldDescriptorProto.prototype.proto3Optional = false; + + /** + * Creates a new FieldDescriptorProto instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto instance + */ + FieldDescriptorProto.create = function create(properties) { + return new FieldDescriptorProto(properties); + }; + + /** + * Encodes the specified FieldDescriptorProto message. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); + if (message.number != null && Object.hasOwnProperty.call(message, "number")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); + if (message.label != null && Object.hasOwnProperty.call(message, "label")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); + if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); + if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); + if (message.options != null && 
Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); + if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); + if (message.proto3Optional != null && Object.hasOwnProperty.call(message, "proto3Optional")) + writer.uint32(/* id 17, wireType 0 =*/136).bool(message.proto3Optional); + return writer; + }; + + /** + * Encodes the specified FieldDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.FieldDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.IFieldDescriptorProto} message FieldDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 3: { + message.number = reader.int32(); + break; + } + case 4: { + message.label = reader.int32(); + break; + } + case 5: { + message.type = reader.int32(); + break; + } + case 6: { + message.typeName = reader.string(); + break; + } + case 2: { + message.extendee = reader.string(); + break; + } + case 7: { + message.defaultValue = reader.string(); + break; + } + case 9: { + message.oneofIndex = reader.int32(); + break; + } + case 10: { + message.jsonName = reader.string(); + break; + } + case 8: { + message.options = $root.google.protobuf.FieldOptions.decode(reader, reader.uint32()); + break; + } + case 17: { + message.proto3Optional = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.label != null && message.hasOwnProperty("label")) + switch (message.label) { + default: + return "label: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + case 17: + case 18: + break; + } + if (message.typeName != null && message.hasOwnProperty("typeName")) + if (!$util.isString(message.typeName)) + return "typeName: string expected"; + if (message.extendee != null && message.hasOwnProperty("extendee")) + if (!$util.isString(message.extendee)) + return "extendee: string expected"; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + if (!$util.isString(message.defaultValue)) + return "defaultValue: string expected"; + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + if (!$util.isInteger(message.oneofIndex)) + return "oneofIndex: integer expected"; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + if (!$util.isString(message.jsonName)) + return "jsonName: string expected"; + if (message.options != null && message.hasOwnProperty("options")) 
{ + var error = $root.google.protobuf.FieldOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + if (typeof message.proto3Optional !== "boolean") + return "proto3Optional: boolean expected"; + return null; + }; + + /** + * Creates a FieldDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldDescriptorProto} FieldDescriptorProto + */ + FieldDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldDescriptorProto) + return object; + var message = new $root.google.protobuf.FieldDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + switch (object.label) { + default: + if (typeof object.label === "number") { + message.label = object.label; + break; + } + break; + case "LABEL_OPTIONAL": + case 1: + message.label = 1; + break; + case "LABEL_REQUIRED": + case 2: + message.label = 2; + break; + case "LABEL_REPEATED": + case 3: + message.label = 3; + break; + } + switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; + case "TYPE_DOUBLE": + case 1: + message.type = 1; + break; + case "TYPE_FLOAT": + case 2: + message.type = 2; + break; + case "TYPE_INT64": + case 3: + message.type = 3; + break; + case "TYPE_UINT64": + case 4: + message.type = 4; + break; + case "TYPE_INT32": + case 5: + message.type = 5; + break; + case "TYPE_FIXED64": + case 6: + message.type = 6; + break; + case "TYPE_FIXED32": + case 7: + message.type = 7; + break; + case "TYPE_BOOL": + case 8: + message.type = 8; + break; + case "TYPE_STRING": + case 9: + message.type = 
9; + break; + case "TYPE_GROUP": + case 10: + message.type = 10; + break; + case "TYPE_MESSAGE": + case 11: + message.type = 11; + break; + case "TYPE_BYTES": + case 12: + message.type = 12; + break; + case "TYPE_UINT32": + case 13: + message.type = 13; + break; + case "TYPE_ENUM": + case 14: + message.type = 14; + break; + case "TYPE_SFIXED32": + case 15: + message.type = 15; + break; + case "TYPE_SFIXED64": + case 16: + message.type = 16; + break; + case "TYPE_SINT32": + case 17: + message.type = 17; + break; + case "TYPE_SINT64": + case 18: + message.type = 18; + break; + } + if (object.typeName != null) + message.typeName = String(object.typeName); + if (object.extendee != null) + message.extendee = String(object.extendee); + if (object.defaultValue != null) + message.defaultValue = String(object.defaultValue); + if (object.oneofIndex != null) + message.oneofIndex = object.oneofIndex | 0; + if (object.jsonName != null) + message.jsonName = String(object.jsonName); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.FieldDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.FieldOptions.fromObject(object.options); + } + if (object.proto3Optional != null) + message.proto3Optional = Boolean(object.proto3Optional); + return message; + }; + + /** + * Creates a plain object from a FieldDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {google.protobuf.FieldDescriptorProto} message FieldDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.extendee = ""; + object.number = 0; + object.label = options.enums === String ? "LABEL_OPTIONAL" : 1; + object.type = options.enums === String ? "TYPE_DOUBLE" : 1; + object.typeName = ""; + object.defaultValue = ""; + object.options = null; + object.oneofIndex = 0; + object.jsonName = ""; + object.proto3Optional = false; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.extendee != null && message.hasOwnProperty("extendee")) + object.extendee = message.extendee; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.label != null && message.hasOwnProperty("label")) + object.label = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Label[message.label] === undefined ? message.label : $root.google.protobuf.FieldDescriptorProto.Label[message.label] : message.label; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.protobuf.FieldDescriptorProto.Type[message.type] === undefined ? 
message.type : $root.google.protobuf.FieldDescriptorProto.Type[message.type] : message.type; + if (message.typeName != null && message.hasOwnProperty("typeName")) + object.typeName = message.typeName; + if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + object.defaultValue = message.defaultValue; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.FieldOptions.toObject(message.options, options); + if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + object.oneofIndex = message.oneofIndex; + if (message.jsonName != null && message.hasOwnProperty("jsonName")) + object.jsonName = message.jsonName; + if (message.proto3Optional != null && message.hasOwnProperty("proto3Optional")) + object.proto3Optional = message.proto3Optional; + return object; + }; + + /** + * Converts this FieldDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.FieldDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + FieldDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.FieldDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldDescriptorProto"; + }; + + /** + * Type enum. 
+ * @name google.protobuf.FieldDescriptorProto.Type + * @enum {number} + * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value + * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value + * @property {number} TYPE_INT64=3 TYPE_INT64 value + * @property {number} TYPE_UINT64=4 TYPE_UINT64 value + * @property {number} TYPE_INT32=5 TYPE_INT32 value + * @property {number} TYPE_FIXED64=6 TYPE_FIXED64 value + * @property {number} TYPE_FIXED32=7 TYPE_FIXED32 value + * @property {number} TYPE_BOOL=8 TYPE_BOOL value + * @property {number} TYPE_STRING=9 TYPE_STRING value + * @property {number} TYPE_GROUP=10 TYPE_GROUP value + * @property {number} TYPE_MESSAGE=11 TYPE_MESSAGE value + * @property {number} TYPE_BYTES=12 TYPE_BYTES value + * @property {number} TYPE_UINT32=13 TYPE_UINT32 value + * @property {number} TYPE_ENUM=14 TYPE_ENUM value + * @property {number} TYPE_SFIXED32=15 TYPE_SFIXED32 value + * @property {number} TYPE_SFIXED64=16 TYPE_SFIXED64 value + * @property {number} TYPE_SINT32=17 TYPE_SINT32 value + * @property {number} TYPE_SINT64=18 TYPE_SINT64 value + */ + FieldDescriptorProto.Type = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "TYPE_DOUBLE"] = 1; + values[valuesById[2] = "TYPE_FLOAT"] = 2; + values[valuesById[3] = "TYPE_INT64"] = 3; + values[valuesById[4] = "TYPE_UINT64"] = 4; + values[valuesById[5] = "TYPE_INT32"] = 5; + values[valuesById[6] = "TYPE_FIXED64"] = 6; + values[valuesById[7] = "TYPE_FIXED32"] = 7; + values[valuesById[8] = "TYPE_BOOL"] = 8; + values[valuesById[9] = "TYPE_STRING"] = 9; + values[valuesById[10] = "TYPE_GROUP"] = 10; + values[valuesById[11] = "TYPE_MESSAGE"] = 11; + values[valuesById[12] = "TYPE_BYTES"] = 12; + values[valuesById[13] = "TYPE_UINT32"] = 13; + values[valuesById[14] = "TYPE_ENUM"] = 14; + values[valuesById[15] = "TYPE_SFIXED32"] = 15; + values[valuesById[16] = "TYPE_SFIXED64"] = 16; + values[valuesById[17] = "TYPE_SINT32"] = 17; + values[valuesById[18] = "TYPE_SINT64"] = 18; 
+ return values; + })(); + + /** + * Label enum. + * @name google.protobuf.FieldDescriptorProto.Label + * @enum {number} + * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value + * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value + * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value + */ + FieldDescriptorProto.Label = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "LABEL_OPTIONAL"] = 1; + values[valuesById[2] = "LABEL_REQUIRED"] = 2; + values[valuesById[3] = "LABEL_REPEATED"] = 3; + return values; + })(); + + return FieldDescriptorProto; + })(); + + protobuf.OneofDescriptorProto = (function() { + + /** + * Properties of an OneofDescriptorProto. + * @memberof google.protobuf + * @interface IOneofDescriptorProto + * @property {string|null} [name] OneofDescriptorProto name + * @property {google.protobuf.IOneofOptions|null} [options] OneofDescriptorProto options + */ + + /** + * Constructs a new OneofDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an OneofDescriptorProto. + * @implements IOneofDescriptorProto + * @constructor + * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + */ + function OneofDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * OneofDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.OneofDescriptorProto + * @instance + */ + OneofDescriptorProto.prototype.name = ""; + + /** + * OneofDescriptorProto options. + * @member {google.protobuf.IOneofOptions|null|undefined} options + * @memberof google.protobuf.OneofDescriptorProto + * @instance + */ + OneofDescriptorProto.prototype.options = null; + + /** + * Creates a new OneofDescriptorProto instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto instance + */ + OneofDescriptorProto.create = function create(properties) { + return new OneofDescriptorProto(properties); + }; + + /** + * Encodes the specified OneofDescriptorProto message. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified OneofDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.OneofDescriptorProto.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.IOneofDescriptorProto} message OneofDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.options = $root.google.protobuf.OneofOptions.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an OneofDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an OneofDescriptorProto message. + * @function verify + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + OneofDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.OneofOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an OneofDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.OneofDescriptorProto} OneofDescriptorProto + */ + OneofDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofDescriptorProto) + return object; + var message = new $root.google.protobuf.OneofDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.OneofDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.OneofOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an OneofDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {google.protobuf.OneofDescriptorProto} message OneofDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + OneofDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.OneofOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this OneofDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.OneofDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + OneofDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for OneofDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.OneofDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + OneofDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.OneofDescriptorProto"; + }; + + return OneofDescriptorProto; + })(); + + protobuf.EnumDescriptorProto = (function() { + + /** + * Properties of an EnumDescriptorProto. + * @memberof google.protobuf + * @interface IEnumDescriptorProto + * @property {string|null} [name] EnumDescriptorProto name + * @property {Array.|null} [value] EnumDescriptorProto value + * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options + * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange + * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + */ + + /** + * Constructs a new EnumDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumDescriptorProto. + * @implements IEnumDescriptorProto + * @constructor + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + */ + function EnumDescriptorProto(properties) { + this.value = []; + this.reservedRange = []; + this.reservedName = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumDescriptorProto name. 
+ * @member {string} name + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.name = ""; + + /** + * EnumDescriptorProto value. + * @member {Array.} value + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.value = $util.emptyArray; + + /** + * EnumDescriptorProto options. + * @member {google.protobuf.IEnumOptions|null|undefined} options + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.options = null; + + /** + * EnumDescriptorProto reservedRange. + * @member {Array.} reservedRange + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.reservedRange = $util.emptyArray; + + /** + * EnumDescriptorProto reservedName. + * @member {Array.} reservedName + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + + /** + * Creates a new EnumDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto instance + */ + EnumDescriptorProto.create = function create(properties) { + return new EnumDescriptorProto(properties); + }; + + /** + * Encodes the specified EnumDescriptorProto message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.value != null && message.value.length) + for (var i = 0; i < message.value.length; ++i) + $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.reservedRange != null && message.reservedRange.length) + for (var i = 0; i < message.reservedRange.length; ++i) + $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.encode(message.reservedRange[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.reservedName != null && message.reservedName.length) + for (var i = 0; i < message.reservedName.length; ++i) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + return writer; + }; + + /** + * Encodes the specified EnumDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.IEnumDescriptorProto} message EnumDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.value && message.value.length)) + message.value = []; + message.value.push($root.google.protobuf.EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + message.options = $root.google.protobuf.EnumOptions.decode(reader, reader.uint32()); + break; + } + case 4: { + if (!(message.reservedRange && message.reservedRange.length)) + message.reservedRange = []; + message.reservedRange.push($root.google.protobuf.EnumDescriptorProto.EnumReservedRange.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.reservedName && message.reservedName.length)) + message.reservedName = []; + message.reservedName.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.value != null && message.hasOwnProperty("value")) { + if (!Array.isArray(message.value)) + return "value: array expected"; + for (var i = 0; i < message.value.length; ++i) { + var error = $root.google.protobuf.EnumValueDescriptorProto.verify(message.value[i]); + if (error) + return "value." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.reservedRange != null && message.hasOwnProperty("reservedRange")) { + if (!Array.isArray(message.reservedRange)) + return "reservedRange: array expected"; + for (var i = 0; i < message.reservedRange.length; ++i) { + var error = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.verify(message.reservedRange[i]); + if (error) + return "reservedRange." + error; + } + } + if (message.reservedName != null && message.hasOwnProperty("reservedName")) { + if (!Array.isArray(message.reservedName)) + return "reservedName: array expected"; + for (var i = 0; i < message.reservedName.length; ++i) + if (!$util.isString(message.reservedName[i])) + return "reservedName: string[] expected"; + } + return null; + }; + + /** + * Creates an EnumDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto} EnumDescriptorProto + */ + EnumDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.value) { + if (!Array.isArray(object.value)) + throw TypeError(".google.protobuf.EnumDescriptorProto.value: array expected"); + message.value = []; + for (var i = 0; i < object.value.length; ++i) { + if (typeof object.value[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.value: object expected"); + message.value[i] = $root.google.protobuf.EnumValueDescriptorProto.fromObject(object.value[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.EnumOptions.fromObject(object.options); + } + if (object.reservedRange) { + if (!Array.isArray(object.reservedRange)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: array expected"); + message.reservedRange = []; + for (var i = 0; i < object.reservedRange.length; ++i) { + if (typeof object.reservedRange[i] !== "object") + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedRange: object expected"); + message.reservedRange[i] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.fromObject(object.reservedRange[i]); + } + } + if (object.reservedName) { + if (!Array.isArray(object.reservedName)) + throw TypeError(".google.protobuf.EnumDescriptorProto.reservedName: array expected"); + message.reservedName = []; + for (var i = 0; i < object.reservedName.length; ++i) + message.reservedName[i] = String(object.reservedName[i]); + 
} + return message; + }; + + /** + * Creates a plain object from an EnumDescriptorProto message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {google.protobuf.EnumDescriptorProto} message EnumDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.value = []; + object.reservedRange = []; + object.reservedName = []; + } + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.value && message.value.length) { + object.value = []; + for (var j = 0; j < message.value.length; ++j) + object.value[j] = $root.google.protobuf.EnumValueDescriptorProto.toObject(message.value[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumOptions.toObject(message.options, options); + if (message.reservedRange && message.reservedRange.length) { + object.reservedRange = []; + for (var j = 0; j < message.reservedRange.length; ++j) + object.reservedRange[j] = $root.google.protobuf.EnumDescriptorProto.EnumReservedRange.toObject(message.reservedRange[j], options); + } + if (message.reservedName && message.reservedName.length) { + object.reservedName = []; + for (var j = 0; j < message.reservedName.length; ++j) + object.reservedName[j] = message.reservedName[j]; + } + return object; + }; + + /** + * Converts this EnumDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + EnumDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EnumDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.EnumDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto"; + }; + + EnumDescriptorProto.EnumReservedRange = (function() { + + /** + * Properties of an EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @interface IEnumReservedRange + * @property {number|null} [start] EnumReservedRange start + * @property {number|null} [end] EnumReservedRange end + */ + + /** + * Constructs a new EnumReservedRange. + * @memberof google.protobuf.EnumDescriptorProto + * @classdesc Represents an EnumReservedRange. + * @implements IEnumReservedRange + * @constructor + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + */ + function EnumReservedRange(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumReservedRange start. + * @member {number} start + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.start = 0; + + /** + * EnumReservedRange end. 
+ * @member {number} end + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + */ + EnumReservedRange.prototype.end = 0; + + /** + * Creates a new EnumReservedRange instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange=} [properties] Properties to set + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange instance + */ + EnumReservedRange.create = function create(properties) { + return new EnumReservedRange(properties); + }; + + /** + * Encodes the specified EnumReservedRange message. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. + * @function encode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.start != null && Object.hasOwnProperty.call(message, "start")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); + return writer; + }; + + /** + * Encodes the specified EnumReservedRange message, length delimited. Does not implicitly {@link google.protobuf.EnumDescriptorProto.EnumReservedRange.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.IEnumReservedRange} message EnumReservedRange message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumReservedRange.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.start = reader.int32(); + break; + } + case 2: { + message.end = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumReservedRange message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumReservedRange.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumReservedRange message. + * @function verify + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumReservedRange.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.start != null && message.hasOwnProperty("start")) + if (!$util.isInteger(message.start)) + return "start: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + return null; + }; + + /** + * Creates an EnumReservedRange message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumDescriptorProto.EnumReservedRange} EnumReservedRange + */ + EnumReservedRange.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumDescriptorProto.EnumReservedRange) + return object; + var message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); + if (object.start != null) + message.start = object.start | 0; + if (object.end != null) + message.end = object.end | 0; + return message; + }; + + /** + * Creates a plain object from an EnumReservedRange message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {google.protobuf.EnumDescriptorProto.EnumReservedRange} message EnumReservedRange + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumReservedRange.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.start = 0; + object.end = 0; + } + if (message.start != null && message.hasOwnProperty("start")) + object.start = message.start; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + return object; + }; + + /** + * Converts this EnumReservedRange to JSON. 
+ * @function toJSON + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @instance + * @returns {Object.} JSON object + */ + EnumReservedRange.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EnumReservedRange + * @function getTypeUrl + * @memberof google.protobuf.EnumDescriptorProto.EnumReservedRange + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumReservedRange.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumDescriptorProto.EnumReservedRange"; + }; + + return EnumReservedRange; + })(); + + return EnumDescriptorProto; + })(); + + protobuf.EnumValueDescriptorProto = (function() { + + /** + * Properties of an EnumValueDescriptorProto. + * @memberof google.protobuf + * @interface IEnumValueDescriptorProto + * @property {string|null} [name] EnumValueDescriptorProto name + * @property {number|null} [number] EnumValueDescriptorProto number + * @property {google.protobuf.IEnumValueOptions|null} [options] EnumValueDescriptorProto options + */ + + /** + * Constructs a new EnumValueDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents an EnumValueDescriptorProto. + * @implements IEnumValueDescriptorProto + * @constructor + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set + */ + function EnumValueDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumValueDescriptorProto name. 
+ * @member {string} name + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.name = ""; + + /** + * EnumValueDescriptorProto number. + * @member {number} number + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.number = 0; + + /** + * EnumValueDescriptorProto options. + * @member {google.protobuf.IEnumValueOptions|null|undefined} options + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + */ + EnumValueDescriptorProto.prototype.options = null; + + /** + * Creates a new EnumValueDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto instance + */ + EnumValueDescriptorProto.create = function create(properties) { + return new EnumValueDescriptorProto(properties); + }; + + /** + * Encodes the specified EnumValueDescriptorProto message. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.number != null && Object.hasOwnProperty.call(message, "number")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumValueDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.EnumValueDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.IEnumValueDescriptorProto} message EnumValueDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.number = reader.int32(); + break; + } + case 3: { + message.options = $root.google.protobuf.EnumValueOptions.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumValueDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumValueDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumValueDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.EnumValueOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates an EnumValueDescriptorProto message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumValueDescriptorProto} EnumValueDescriptorProto + */ + EnumValueDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueDescriptorProto) + return object; + var message = new $root.google.protobuf.EnumValueDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.number != null) + message.number = object.number | 0; + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.EnumValueDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.EnumValueOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from an EnumValueDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {google.protobuf.EnumValueDescriptorProto} message EnumValueDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumValueDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.number = 0; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.EnumValueOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this EnumValueDescriptorProto to JSON. + * @function toJSON + * @memberof google.protobuf.EnumValueDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + EnumValueDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EnumValueDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.EnumValueDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumValueDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumValueDescriptorProto"; + }; + + return EnumValueDescriptorProto; + })(); + + protobuf.ServiceDescriptorProto = (function() { + + /** + * Properties of a ServiceDescriptorProto. 
+ * @memberof google.protobuf + * @interface IServiceDescriptorProto + * @property {string|null} [name] ServiceDescriptorProto name + * @property {Array.|null} [method] ServiceDescriptorProto method + * @property {google.protobuf.IServiceOptions|null} [options] ServiceDescriptorProto options + */ + + /** + * Constructs a new ServiceDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a ServiceDescriptorProto. + * @implements IServiceDescriptorProto + * @constructor + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + */ + function ServiceDescriptorProto(properties) { + this.method = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ServiceDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.name = ""; + + /** + * ServiceDescriptorProto method. + * @member {Array.} method + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.method = $util.emptyArray; + + /** + * ServiceDescriptorProto options. + * @member {google.protobuf.IServiceOptions|null|undefined} options + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + */ + ServiceDescriptorProto.prototype.options = null; + + /** + * Creates a new ServiceDescriptorProto instance using the specified properties. + * @function create + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto instance + */ + ServiceDescriptorProto.create = function create(properties) { + return new ServiceDescriptorProto(properties); + }; + + /** + * Encodes the specified ServiceDescriptorProto message. 
Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.method != null && message.method.length) + for (var i = 0; i < message.method.length; ++i) + $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ServiceDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.ServiceDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.IServiceDescriptorProto} message ServiceDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + if (!(message.method && message.method.length)) + message.method = []; + message.method.push($root.google.protobuf.MethodDescriptorProto.decode(reader, reader.uint32())); + break; + } + case 3: { + message.options = $root.google.protobuf.ServiceOptions.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ServiceDescriptorProto message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ServiceDescriptorProto message. + * @function verify + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ServiceDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.method != null && message.hasOwnProperty("method")) { + if (!Array.isArray(message.method)) + return "method: array expected"; + for (var i = 0; i < message.method.length; ++i) { + var error = $root.google.protobuf.MethodDescriptorProto.verify(message.method[i]); + if (error) + return "method." + error; + } + } + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.ServiceOptions.verify(message.options); + if (error) + return "options." + error; + } + return null; + }; + + /** + * Creates a ServiceDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ServiceDescriptorProto} ServiceDescriptorProto + */ + ServiceDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceDescriptorProto) + return object; + var message = new $root.google.protobuf.ServiceDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.method) { + if (!Array.isArray(object.method)) + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: array expected"); + message.method = []; + for (var i = 0; i < object.method.length; ++i) { + if (typeof object.method[i] !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.method: object expected"); + message.method[i] = $root.google.protobuf.MethodDescriptorProto.fromObject(object.method[i]); + } + } + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.ServiceDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.ServiceOptions.fromObject(object.options); + } + return message; + }; + + /** + * Creates a plain object from a ServiceDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {google.protobuf.ServiceDescriptorProto} message ServiceDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ServiceDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.method = []; + if (options.defaults) { + object.name = ""; + object.options = null; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.method && message.method.length) { + object.method = []; + for (var j = 0; j < message.method.length; ++j) + object.method[j] = $root.google.protobuf.MethodDescriptorProto.toObject(message.method[j], options); + } + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.ServiceOptions.toObject(message.options, options); + return object; + }; + + /** + * Converts this ServiceDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.ServiceDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + ServiceDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ServiceDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.ServiceDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ServiceDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ServiceDescriptorProto"; + }; + + return ServiceDescriptorProto; + })(); + + protobuf.MethodDescriptorProto = (function() { + + /** + * Properties of a MethodDescriptorProto. + * @memberof google.protobuf + * @interface IMethodDescriptorProto + * @property {string|null} [name] MethodDescriptorProto name + * @property {string|null} [inputType] MethodDescriptorProto inputType + * @property {string|null} [outputType] MethodDescriptorProto outputType + * @property {google.protobuf.IMethodOptions|null} [options] MethodDescriptorProto options + * @property {boolean|null} [clientStreaming] MethodDescriptorProto clientStreaming + * @property {boolean|null} [serverStreaming] MethodDescriptorProto serverStreaming + */ + + /** + * Constructs a new MethodDescriptorProto. + * @memberof google.protobuf + * @classdesc Represents a MethodDescriptorProto. 
+ * @implements IMethodDescriptorProto + * @constructor + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set + */ + function MethodDescriptorProto(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MethodDescriptorProto name. + * @member {string} name + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.name = ""; + + /** + * MethodDescriptorProto inputType. + * @member {string} inputType + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.inputType = ""; + + /** + * MethodDescriptorProto outputType. + * @member {string} outputType + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.outputType = ""; + + /** + * MethodDescriptorProto options. + * @member {google.protobuf.IMethodOptions|null|undefined} options + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.options = null; + + /** + * MethodDescriptorProto clientStreaming. + * @member {boolean} clientStreaming + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.clientStreaming = false; + + /** + * MethodDescriptorProto serverStreaming. + * @member {boolean} serverStreaming + * @memberof google.protobuf.MethodDescriptorProto + * @instance + */ + MethodDescriptorProto.prototype.serverStreaming = false; + + /** + * Creates a new MethodDescriptorProto instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto=} [properties] Properties to set + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto instance + */ + MethodDescriptorProto.create = function create(properties) { + return new MethodDescriptorProto(properties); + }; + + /** + * Encodes the specified MethodDescriptorProto message. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @function encode + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodDescriptorProto.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); + if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); + if (message.options != null && Object.hasOwnProperty.call(message, "options")) + $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); + if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); + return writer; + }; + + /** + * Encodes the specified 
MethodDescriptorProto message, length delimited. Does not implicitly {@link google.protobuf.MethodDescriptorProto.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.IMethodDescriptorProto} message MethodDescriptorProto message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodDescriptorProto.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodDescriptorProto.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.inputType = reader.string(); + break; + } + case 3: { + message.outputType = reader.string(); + break; + } + case 4: { + message.options = $root.google.protobuf.MethodOptions.decode(reader, reader.uint32()); + break; + } + case 5: { + message.clientStreaming = reader.bool(); + break; + } + case 6: { + message.serverStreaming = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MethodDescriptorProto message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodDescriptorProto.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MethodDescriptorProto message. 
+ * @function verify + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MethodDescriptorProto.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.inputType != null && message.hasOwnProperty("inputType")) + if (!$util.isString(message.inputType)) + return "inputType: string expected"; + if (message.outputType != null && message.hasOwnProperty("outputType")) + if (!$util.isString(message.outputType)) + return "outputType: string expected"; + if (message.options != null && message.hasOwnProperty("options")) { + var error = $root.google.protobuf.MethodOptions.verify(message.options); + if (error) + return "options." + error; + } + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + if (typeof message.clientStreaming !== "boolean") + return "clientStreaming: boolean expected"; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + if (typeof message.serverStreaming !== "boolean") + return "serverStreaming: boolean expected"; + return null; + }; + + /** + * Creates a MethodDescriptorProto message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MethodDescriptorProto} MethodDescriptorProto + */ + MethodDescriptorProto.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodDescriptorProto) + return object; + var message = new $root.google.protobuf.MethodDescriptorProto(); + if (object.name != null) + message.name = String(object.name); + if (object.inputType != null) + message.inputType = String(object.inputType); + if (object.outputType != null) + message.outputType = String(object.outputType); + if (object.options != null) { + if (typeof object.options !== "object") + throw TypeError(".google.protobuf.MethodDescriptorProto.options: object expected"); + message.options = $root.google.protobuf.MethodOptions.fromObject(object.options); + } + if (object.clientStreaming != null) + message.clientStreaming = Boolean(object.clientStreaming); + if (object.serverStreaming != null) + message.serverStreaming = Boolean(object.serverStreaming); + return message; + }; + + /** + * Creates a plain object from a MethodDescriptorProto message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {google.protobuf.MethodDescriptorProto} message MethodDescriptorProto + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MethodDescriptorProto.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.inputType = ""; + object.outputType = ""; + object.options = null; + object.clientStreaming = false; + object.serverStreaming = false; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.inputType != null && message.hasOwnProperty("inputType")) + object.inputType = message.inputType; + if (message.outputType != null && message.hasOwnProperty("outputType")) + object.outputType = message.outputType; + if (message.options != null && message.hasOwnProperty("options")) + object.options = $root.google.protobuf.MethodOptions.toObject(message.options, options); + if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + object.clientStreaming = message.clientStreaming; + if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + object.serverStreaming = message.serverStreaming; + return object; + }; + + /** + * Converts this MethodDescriptorProto to JSON. 
+ * @function toJSON + * @memberof google.protobuf.MethodDescriptorProto + * @instance + * @returns {Object.} JSON object + */ + MethodDescriptorProto.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MethodDescriptorProto + * @function getTypeUrl + * @memberof google.protobuf.MethodDescriptorProto + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MethodDescriptorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.MethodDescriptorProto"; + }; + + return MethodDescriptorProto; + })(); + + protobuf.FileOptions = (function() { + + /** + * Properties of a FileOptions. + * @memberof google.protobuf + * @interface IFileOptions + * @property {string|null} [javaPackage] FileOptions javaPackage + * @property {string|null} [javaOuterClassname] FileOptions javaOuterClassname + * @property {boolean|null} [javaMultipleFiles] FileOptions javaMultipleFiles + * @property {boolean|null} [javaGenerateEqualsAndHash] FileOptions javaGenerateEqualsAndHash + * @property {boolean|null} [javaStringCheckUtf8] FileOptions javaStringCheckUtf8 + * @property {google.protobuf.FileOptions.OptimizeMode|null} [optimizeFor] FileOptions optimizeFor + * @property {string|null} [goPackage] FileOptions goPackage + * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices + * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices + * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices + * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices + * @property {boolean|null} [deprecated] FileOptions deprecated + * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas + * 
@property {string|null} [objcClassPrefix] FileOptions objcClassPrefix + * @property {string|null} [csharpNamespace] FileOptions csharpNamespace + * @property {string|null} [swiftPrefix] FileOptions swiftPrefix + * @property {string|null} [phpClassPrefix] FileOptions phpClassPrefix + * @property {string|null} [phpNamespace] FileOptions phpNamespace + * @property {string|null} [phpMetadataNamespace] FileOptions phpMetadataNamespace + * @property {string|null} [rubyPackage] FileOptions rubyPackage + * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption + * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition + */ + + /** + * Constructs a new FileOptions. + * @memberof google.protobuf + * @classdesc Represents a FileOptions. + * @implements IFileOptions + * @constructor + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + */ + function FileOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.resourceDefinition"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FileOptions javaPackage. + * @member {string} javaPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaPackage = ""; + + /** + * FileOptions javaOuterClassname. + * @member {string} javaOuterClassname + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaOuterClassname = ""; + + /** + * FileOptions javaMultipleFiles. + * @member {boolean} javaMultipleFiles + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaMultipleFiles = false; + + /** + * FileOptions javaGenerateEqualsAndHash. 
+ * @member {boolean} javaGenerateEqualsAndHash + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenerateEqualsAndHash = false; + + /** + * FileOptions javaStringCheckUtf8. + * @member {boolean} javaStringCheckUtf8 + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaStringCheckUtf8 = false; + + /** + * FileOptions optimizeFor. + * @member {google.protobuf.FileOptions.OptimizeMode} optimizeFor + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.optimizeFor = 1; + + /** + * FileOptions goPackage. + * @member {string} goPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.goPackage = ""; + + /** + * FileOptions ccGenericServices. + * @member {boolean} ccGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.ccGenericServices = false; + + /** + * FileOptions javaGenericServices. + * @member {boolean} javaGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.javaGenericServices = false; + + /** + * FileOptions pyGenericServices. + * @member {boolean} pyGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.pyGenericServices = false; + + /** + * FileOptions phpGenericServices. + * @member {boolean} phpGenericServices + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpGenericServices = false; + + /** + * FileOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.deprecated = false; + + /** + * FileOptions ccEnableArenas. + * @member {boolean} ccEnableArenas + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.ccEnableArenas = true; + + /** + * FileOptions objcClassPrefix. 
+ * @member {string} objcClassPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.objcClassPrefix = ""; + + /** + * FileOptions csharpNamespace. + * @member {string} csharpNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.csharpNamespace = ""; + + /** + * FileOptions swiftPrefix. + * @member {string} swiftPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.swiftPrefix = ""; + + /** + * FileOptions phpClassPrefix. + * @member {string} phpClassPrefix + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpClassPrefix = ""; + + /** + * FileOptions phpNamespace. + * @member {string} phpNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpNamespace = ""; + + /** + * FileOptions phpMetadataNamespace. + * @member {string} phpMetadataNamespace + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.phpMetadataNamespace = ""; + + /** + * FileOptions rubyPackage. + * @member {string} rubyPackage + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.rubyPackage = ""; + + /** + * FileOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * FileOptions .google.api.resourceDefinition. + * @member {Array.} .google.api.resourceDefinition + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype[".google.api.resourceDefinition"] = $util.emptyArray; + + /** + * Creates a new FileOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions=} [properties] Properties to set + * @returns {google.protobuf.FileOptions} FileOptions instance + */ + FileOptions.create = function create(properties) { + return new FileOptions(properties); + }; + + /** + * Encodes the specified FileOptions message. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); + if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); + if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); + if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); + if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) + writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); + if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) + writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); + if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) + writer.uint32(/* id 17, wireType 0 
=*/136).bool(message.javaGenericServices); + if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, "pyGenericServices")) + writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); + if (message.javaGenerateEqualsAndHash != null && Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) + writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); + if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) + writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); + if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) + writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); + if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) + writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); + if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) + writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); + if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) + writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); + if (message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) + writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); + if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) + writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); + if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) + writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); + if 
(message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) + writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); + if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) + writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resourceDefinition"] != null && message[".google.api.resourceDefinition"].length) + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resourceDefinition"][i], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FileOptions message, length delimited. Does not implicitly {@link google.protobuf.FileOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.IFileOptions} message FileOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FileOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FileOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FileOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FileOptions} FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.javaPackage = reader.string(); + break; + } + case 8: { + message.javaOuterClassname = reader.string(); + break; + } + case 10: { + message.javaMultipleFiles = reader.bool(); + break; + } + case 20: { + message.javaGenerateEqualsAndHash = reader.bool(); + break; + } + case 27: { + message.javaStringCheckUtf8 = reader.bool(); + break; + } + case 9: { + message.optimizeFor = reader.int32(); + break; + } + case 11: { + message.goPackage = reader.string(); + break; + } + case 16: { + message.ccGenericServices = reader.bool(); + break; + } + case 17: { + message.javaGenericServices = reader.bool(); + break; + } + case 18: { + message.pyGenericServices = reader.bool(); + break; + } + case 42: { + message.phpGenericServices = reader.bool(); + break; + } + case 23: { + message.deprecated = reader.bool(); + break; + } + case 31: { + message.ccEnableArenas = reader.bool(); + break; + } + case 36: { + message.objcClassPrefix = reader.string(); + break; + } + case 37: { + message.csharpNamespace = reader.string(); + break; + } + case 39: { + message.swiftPrefix = reader.string(); + break; + } + case 40: { + message.phpClassPrefix = reader.string(); + break; + } + case 41: { + message.phpNamespace = reader.string(); + 
break; + } + case 44: { + message.phpMetadataNamespace = reader.string(); + break; + } + case 45: { + message.rubyPackage = reader.string(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1053: { + if (!(message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length)) + message[".google.api.resourceDefinition"] = []; + message[".google.api.resourceDefinition"].push($root.google.api.ResourceDescriptor.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FileOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FileOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FileOptions} FileOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FileOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FileOptions message. 
+ * @function verify + * @memberof google.protobuf.FileOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FileOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + if (!$util.isString(message.javaPackage)) + return "javaPackage: string expected"; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + if (!$util.isString(message.javaOuterClassname)) + return "javaOuterClassname: string expected"; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + if (typeof message.javaMultipleFiles !== "boolean") + return "javaMultipleFiles: boolean expected"; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + if (typeof message.javaGenerateEqualsAndHash !== "boolean") + return "javaGenerateEqualsAndHash: boolean expected"; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + if (typeof message.javaStringCheckUtf8 !== "boolean") + return "javaStringCheckUtf8: boolean expected"; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + switch (message.optimizeFor) { + default: + return "optimizeFor: enum value expected"; + case 1: + case 2: + case 3: + break; + } + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + if (!$util.isString(message.goPackage)) + return "goPackage: string expected"; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + if (typeof message.ccGenericServices !== "boolean") + return "ccGenericServices: boolean expected"; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + if (typeof message.javaGenericServices !== 
"boolean") + return "javaGenericServices: boolean expected"; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + if (typeof message.pyGenericServices !== "boolean") + return "pyGenericServices: boolean expected"; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + if (typeof message.phpGenericServices !== "boolean") + return "phpGenericServices: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + if (typeof message.ccEnableArenas !== "boolean") + return "ccEnableArenas: boolean expected"; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + if (!$util.isString(message.objcClassPrefix)) + return "objcClassPrefix: string expected"; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + if (!$util.isString(message.csharpNamespace)) + return "csharpNamespace: string expected"; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + if (!$util.isString(message.swiftPrefix)) + return "swiftPrefix: string expected"; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + if (!$util.isString(message.phpClassPrefix)) + return "phpClassPrefix: string expected"; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + if (!$util.isString(message.phpNamespace)) + return "phpNamespace: string expected"; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + if (!$util.isString(message.phpMetadataNamespace)) + return "phpMetadataNamespace: string expected"; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + if (!$util.isString(message.rubyPackage)) + return "rubyPackage: string expected"; + 
if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.resourceDefinition"] != null && message.hasOwnProperty(".google.api.resourceDefinition")) { + if (!Array.isArray(message[".google.api.resourceDefinition"])) + return ".google.api.resourceDefinition: array expected"; + for (var i = 0; i < message[".google.api.resourceDefinition"].length; ++i) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resourceDefinition"][i]); + if (error) + return ".google.api.resourceDefinition." + error; + } + } + return null; + }; + + /** + * Creates a FileOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FileOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FileOptions} FileOptions + */ + FileOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FileOptions) + return object; + var message = new $root.google.protobuf.FileOptions(); + if (object.javaPackage != null) + message.javaPackage = String(object.javaPackage); + if (object.javaOuterClassname != null) + message.javaOuterClassname = String(object.javaOuterClassname); + if (object.javaMultipleFiles != null) + message.javaMultipleFiles = Boolean(object.javaMultipleFiles); + if (object.javaGenerateEqualsAndHash != null) + message.javaGenerateEqualsAndHash = Boolean(object.javaGenerateEqualsAndHash); + if (object.javaStringCheckUtf8 != null) + message.javaStringCheckUtf8 = Boolean(object.javaStringCheckUtf8); + switch (object.optimizeFor) { + default: + if (typeof object.optimizeFor === "number") { + message.optimizeFor = object.optimizeFor; + break; + } + break; + case "SPEED": + case 1: + message.optimizeFor = 1; + break; + case "CODE_SIZE": + case 2: + message.optimizeFor = 2; + break; + case "LITE_RUNTIME": + case 3: + message.optimizeFor = 3; + break; + } + if (object.goPackage != null) + message.goPackage = String(object.goPackage); + if (object.ccGenericServices != null) + message.ccGenericServices = Boolean(object.ccGenericServices); + if (object.javaGenericServices != null) + message.javaGenericServices = Boolean(object.javaGenericServices); + if (object.pyGenericServices != null) + message.pyGenericServices = Boolean(object.pyGenericServices); + if (object.phpGenericServices != null) + message.phpGenericServices = Boolean(object.phpGenericServices); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.ccEnableArenas != null) + message.ccEnableArenas = Boolean(object.ccEnableArenas); + if (object.objcClassPrefix != null) + 
message.objcClassPrefix = String(object.objcClassPrefix); + if (object.csharpNamespace != null) + message.csharpNamespace = String(object.csharpNamespace); + if (object.swiftPrefix != null) + message.swiftPrefix = String(object.swiftPrefix); + if (object.phpClassPrefix != null) + message.phpClassPrefix = String(object.phpClassPrefix); + if (object.phpNamespace != null) + message.phpNamespace = String(object.phpNamespace); + if (object.phpMetadataNamespace != null) + message.phpMetadataNamespace = String(object.phpMetadataNamespace); + if (object.rubyPackage != null) + message.rubyPackage = String(object.rubyPackage); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.resourceDefinition"]) { + if (!Array.isArray(object[".google.api.resourceDefinition"])) + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: array expected"); + message[".google.api.resourceDefinition"] = []; + for (var i = 0; i < object[".google.api.resourceDefinition"].length; ++i) { + if (typeof object[".google.api.resourceDefinition"][i] !== "object") + throw TypeError(".google.protobuf.FileOptions..google.api.resourceDefinition: object expected"); + message[".google.api.resourceDefinition"][i] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resourceDefinition"][i]); + } + } + return message; + }; + + /** + * Creates a plain object from a FileOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FileOptions + * @static + * @param {google.protobuf.FileOptions} message FileOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FileOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.resourceDefinition"] = []; + } + if (options.defaults) { + object.javaPackage = ""; + object.javaOuterClassname = ""; + object.optimizeFor = options.enums === String ? "SPEED" : 1; + object.javaMultipleFiles = false; + object.goPackage = ""; + object.ccGenericServices = false; + object.javaGenericServices = false; + object.pyGenericServices = false; + object.javaGenerateEqualsAndHash = false; + object.deprecated = false; + object.javaStringCheckUtf8 = false; + object.ccEnableArenas = true; + object.objcClassPrefix = ""; + object.csharpNamespace = ""; + object.swiftPrefix = ""; + object.phpClassPrefix = ""; + object.phpNamespace = ""; + object.phpGenericServices = false; + object.phpMetadataNamespace = ""; + object.rubyPackage = ""; + } + if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + object.javaPackage = message.javaPackage; + if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + object.javaOuterClassname = message.javaOuterClassname; + if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + object.optimizeFor = options.enums === String ? $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] === undefined ? 
message.optimizeFor : $root.google.protobuf.FileOptions.OptimizeMode[message.optimizeFor] : message.optimizeFor; + if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + object.javaMultipleFiles = message.javaMultipleFiles; + if (message.goPackage != null && message.hasOwnProperty("goPackage")) + object.goPackage = message.goPackage; + if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + object.ccGenericServices = message.ccGenericServices; + if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + object.javaGenericServices = message.javaGenericServices; + if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + object.pyGenericServices = message.pyGenericServices; + if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + object.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + object.javaStringCheckUtf8 = message.javaStringCheckUtf8; + if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + object.ccEnableArenas = message.ccEnableArenas; + if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + object.objcClassPrefix = message.objcClassPrefix; + if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + object.csharpNamespace = message.csharpNamespace; + if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + object.swiftPrefix = message.swiftPrefix; + if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + object.phpClassPrefix = message.phpClassPrefix; + if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + 
object.phpNamespace = message.phpNamespace; + if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + object.phpGenericServices = message.phpGenericServices; + if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + object.phpMetadataNamespace = message.phpMetadataNamespace; + if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + object.rubyPackage = message.rubyPackage; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.resourceDefinition"] && message[".google.api.resourceDefinition"].length) { + object[".google.api.resourceDefinition"] = []; + for (var j = 0; j < message[".google.api.resourceDefinition"].length; ++j) + object[".google.api.resourceDefinition"][j] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resourceDefinition"][j], options); + } + return object; + }; + + /** + * Converts this FileOptions to JSON. + * @function toJSON + * @memberof google.protobuf.FileOptions + * @instance + * @returns {Object.} JSON object + */ + FileOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FileOptions + * @function getTypeUrl + * @memberof google.protobuf.FileOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FileOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FileOptions"; + }; + + /** + * OptimizeMode enum. 
+ * @name google.protobuf.FileOptions.OptimizeMode + * @enum {number} + * @property {number} SPEED=1 SPEED value + * @property {number} CODE_SIZE=2 CODE_SIZE value + * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value + */ + FileOptions.OptimizeMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[1] = "SPEED"] = 1; + values[valuesById[2] = "CODE_SIZE"] = 2; + values[valuesById[3] = "LITE_RUNTIME"] = 3; + return values; + })(); + + return FileOptions; + })(); + + protobuf.MessageOptions = (function() { + + /** + * Properties of a MessageOptions. + * @memberof google.protobuf + * @interface IMessageOptions + * @property {boolean|null} [messageSetWireFormat] MessageOptions messageSetWireFormat + * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor + * @property {boolean|null} [deprecated] MessageOptions deprecated + * @property {boolean|null} [mapEntry] MessageOptions mapEntry + * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption + * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource + */ + + /** + * Constructs a new MessageOptions. + * @memberof google.protobuf + * @classdesc Represents a MessageOptions. + * @implements IMessageOptions + * @constructor + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + */ + function MessageOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MessageOptions messageSetWireFormat. + * @member {boolean} messageSetWireFormat + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.messageSetWireFormat = false; + + /** + * MessageOptions noStandardDescriptorAccessor. 
+ * @member {boolean} noStandardDescriptorAccessor + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.noStandardDescriptorAccessor = false; + + /** + * MessageOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.deprecated = false; + + /** + * MessageOptions mapEntry. + * @member {boolean} mapEntry + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.mapEntry = false; + + /** + * MessageOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * MessageOptions .google.api.resource. + * @member {google.api.IResourceDescriptor|null|undefined} .google.api.resource + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype[".google.api.resource"] = null; + + /** + * Creates a new MessageOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions=} [properties] Properties to set + * @returns {google.protobuf.MessageOptions} MessageOptions instance + */ + MessageOptions.create = function create(properties) { + return new MessageOptions(properties); + }; + + /** + * Encodes the specified MessageOptions message. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MessageOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); + if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) + writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, ".google.api.resource")) + $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 2 =*/8426).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MessageOptions message, length delimited. Does not implicitly {@link google.protobuf.MessageOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.IMessageOptions} message MessageOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MessageOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MessageOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MessageOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MessageOptions} MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MessageOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.messageSetWireFormat = reader.bool(); + break; + } + case 2: { + message.noStandardDescriptorAccessor = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 7: { + message.mapEntry = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1053: { + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MessageOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.MessageOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MessageOptions} MessageOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MessageOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MessageOptions message. 
+ * @function verify + * @memberof google.protobuf.MessageOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MessageOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + if (typeof message.messageSetWireFormat !== "boolean") + return "messageSetWireFormat: boolean expected"; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + if (typeof message.noStandardDescriptorAccessor !== "boolean") + return "noStandardDescriptorAccessor: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + if (typeof message.mapEntry !== "boolean") + return "mapEntry: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) { + var error = $root.google.api.ResourceDescriptor.verify(message[".google.api.resource"]); + if (error) + return ".google.api.resource." + error; + } + return null; + }; + + /** + * Creates a MessageOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MessageOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MessageOptions} MessageOptions + */ + MessageOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MessageOptions) + return object; + var message = new $root.google.protobuf.MessageOptions(); + if (object.messageSetWireFormat != null) + message.messageSetWireFormat = Boolean(object.messageSetWireFormat); + if (object.noStandardDescriptorAccessor != null) + message.noStandardDescriptorAccessor = Boolean(object.noStandardDescriptorAccessor); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.mapEntry != null) + message.mapEntry = Boolean(object.mapEntry); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.resource"] != null) { + if (typeof object[".google.api.resource"] !== "object") + throw TypeError(".google.protobuf.MessageOptions..google.api.resource: object expected"); + message[".google.api.resource"] = $root.google.api.ResourceDescriptor.fromObject(object[".google.api.resource"]); + } + return message; + }; + + /** + * Creates a plain object from a MessageOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.MessageOptions + * @static + * @param {google.protobuf.MessageOptions} message MessageOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MessageOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.messageSetWireFormat = false; + object.noStandardDescriptorAccessor = false; + object.deprecated = false; + object.mapEntry = false; + object[".google.api.resource"] = null; + } + if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + object.messageSetWireFormat = message.messageSetWireFormat; + if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + object.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + object.mapEntry = message.mapEntry; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + object[".google.api.resource"] = $root.google.api.ResourceDescriptor.toObject(message[".google.api.resource"], options); + return object; + }; + + /** + * Converts this MessageOptions to JSON. 
+ * @function toJSON + * @memberof google.protobuf.MessageOptions + * @instance + * @returns {Object.} JSON object + */ + MessageOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MessageOptions + * @function getTypeUrl + * @memberof google.protobuf.MessageOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MessageOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.MessageOptions"; + }; + + return MessageOptions; + })(); + + protobuf.FieldOptions = (function() { + + /** + * Properties of a FieldOptions. + * @memberof google.protobuf + * @interface IFieldOptions + * @property {google.protobuf.FieldOptions.CType|null} [ctype] FieldOptions ctype + * @property {boolean|null} [packed] FieldOptions packed + * @property {google.protobuf.FieldOptions.JSType|null} [jstype] FieldOptions jstype + * @property {boolean|null} [lazy] FieldOptions lazy + * @property {boolean|null} [unverifiedLazy] FieldOptions unverifiedLazy + * @property {boolean|null} [deprecated] FieldOptions deprecated + * @property {boolean|null} [weak] FieldOptions weak + * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption + * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName + * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior + * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference + */ + + /** + * Constructs a new FieldOptions. + * @memberof google.protobuf + * @classdesc Represents a FieldOptions. 
+ * @implements IFieldOptions + * @constructor + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + */ + function FieldOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.fieldBehavior"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldOptions ctype. + * @member {google.protobuf.FieldOptions.CType} ctype + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.ctype = 0; + + /** + * FieldOptions packed. + * @member {boolean} packed + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.packed = false; + + /** + * FieldOptions jstype. + * @member {google.protobuf.FieldOptions.JSType} jstype + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.jstype = 0; + + /** + * FieldOptions lazy. + * @member {boolean} lazy + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.lazy = false; + + /** + * FieldOptions unverifiedLazy. + * @member {boolean} unverifiedLazy + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.unverifiedLazy = false; + + /** + * FieldOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.deprecated = false; + + /** + * FieldOptions weak. + * @member {boolean} weak + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.weak = false; + + /** + * FieldOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * FieldOptions .google.cloud.bigquery.storage.v1.columnName. 
+ * @member {string} .google.cloud.bigquery.storage.v1.columnName + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.cloud.bigquery.storage.v1.columnName"] = null; + + /** + * FieldOptions .google.api.fieldBehavior. + * @member {Array.} .google.api.fieldBehavior + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.api.fieldBehavior"] = $util.emptyArray; + + /** + * FieldOptions .google.api.resourceReference. + * @member {google.api.IResourceReference|null|undefined} .google.api.resourceReference + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype[".google.api.resourceReference"] = null; + + /** + * Creates a new FieldOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions} FieldOptions instance + */ + FieldOptions.create = function create(properties) { + return new FieldOptions(properties); + }; + + /** + * Encodes the specified FieldOptions message. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); + if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); + if (message.jstype != null && Object.hasOwnProperty.call(message, "jstype")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); + if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) + writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); + if (message.unverifiedLazy != null && Object.hasOwnProperty.call(message, "unverifiedLazy")) + writer.uint32(/* id 15, wireType 0 =*/120).bool(message.unverifiedLazy); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { + writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + writer.int32(message[".google.api.fieldBehavior"][i]); + 
writer.ldelim(); + } + if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) + $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && Object.hasOwnProperty.call(message, ".google.cloud.bigquery.storage.v1.columnName")) + writer.uint32(/* id 454943157, wireType 2 =*/3639545258).string(message[".google.cloud.bigquery.storage.v1.columnName"]); + return writer; + }; + + /** + * Encodes the specified FieldOptions message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.IFieldOptions} message FieldOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions} FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.ctype = reader.int32(); + break; + } + case 2: { + message.packed = reader.bool(); + break; + } + case 6: { + message.jstype = reader.int32(); + break; + } + case 5: { + message.lazy = reader.bool(); + break; + } + case 15: { + message.unverifiedLazy = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 10: { + message.weak = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 454943157: { + message[".google.cloud.bigquery.storage.v1.columnName"] = reader.string(); + break; + } + case 1052: { + if (!(message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length)) + message[".google.api.fieldBehavior"] = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message[".google.api.fieldBehavior"].push(reader.int32()); + } else + message[".google.api.fieldBehavior"].push(reader.int32()); + break; + } + case 1055: { + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FieldOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions} FieldOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldOptions message. + * @function verify + * @memberof google.protobuf.FieldOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.ctype != null && message.hasOwnProperty("ctype")) + switch (message.ctype) { + default: + return "ctype: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.packed != null && message.hasOwnProperty("packed")) + if (typeof message.packed !== "boolean") + return "packed: boolean expected"; + if (message.jstype != null && message.hasOwnProperty("jstype")) + switch (message.jstype) { + default: + return "jstype: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.lazy != null && message.hasOwnProperty("lazy")) + if (typeof message.lazy !== "boolean") + return "lazy: boolean expected"; + if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) + if (typeof message.unverifiedLazy !== "boolean") + return "unverifiedLazy: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.weak != null && 
message.hasOwnProperty("weak")) + if (typeof message.weak !== "boolean") + return "weak: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) + if (!$util.isString(message[".google.cloud.bigquery.storage.v1.columnName"])) + return ".google.cloud.bigquery.storage.v1.columnName: string expected"; + if (message[".google.api.fieldBehavior"] != null && message.hasOwnProperty(".google.api.fieldBehavior")) { + if (!Array.isArray(message[".google.api.fieldBehavior"])) + return ".google.api.fieldBehavior: array expected"; + for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) + switch (message[".google.api.fieldBehavior"][i]) { + default: + return ".google.api.fieldBehavior: enum value[] expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + break; + } + } + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) { + var error = $root.google.api.ResourceReference.verify(message[".google.api.resourceReference"]); + if (error) + return ".google.api.resourceReference." + error; + } + return null; + }; + + /** + * Creates a FieldOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FieldOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions} FieldOptions + */ + FieldOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions) + return object; + var message = new $root.google.protobuf.FieldOptions(); + switch (object.ctype) { + default: + if (typeof object.ctype === "number") { + message.ctype = object.ctype; + break; + } + break; + case "STRING": + case 0: + message.ctype = 0; + break; + case "CORD": + case 1: + message.ctype = 1; + break; + case "STRING_PIECE": + case 2: + message.ctype = 2; + break; + } + if (object.packed != null) + message.packed = Boolean(object.packed); + switch (object.jstype) { + default: + if (typeof object.jstype === "number") { + message.jstype = object.jstype; + break; + } + break; + case "JS_NORMAL": + case 0: + message.jstype = 0; + break; + case "JS_STRING": + case 1: + message.jstype = 1; + break; + case "JS_NUMBER": + case 2: + message.jstype = 2; + break; + } + if (object.lazy != null) + message.lazy = Boolean(object.lazy); + if (object.unverifiedLazy != null) + message.unverifiedLazy = Boolean(object.unverifiedLazy); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.weak != null) + message.weak = Boolean(object.weak); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if 
(object[".google.cloud.bigquery.storage.v1.columnName"] != null) + message[".google.cloud.bigquery.storage.v1.columnName"] = String(object[".google.cloud.bigquery.storage.v1.columnName"]); + if (object[".google.api.fieldBehavior"]) { + if (!Array.isArray(object[".google.api.fieldBehavior"])) + throw TypeError(".google.protobuf.FieldOptions..google.api.fieldBehavior: array expected"); + message[".google.api.fieldBehavior"] = []; + for (var i = 0; i < object[".google.api.fieldBehavior"].length; ++i) + switch (object[".google.api.fieldBehavior"][i]) { + default: + if (typeof object[".google.api.fieldBehavior"][i] === "number") { + message[".google.api.fieldBehavior"][i] = object[".google.api.fieldBehavior"][i]; + break; + } + case "FIELD_BEHAVIOR_UNSPECIFIED": + case 0: + message[".google.api.fieldBehavior"][i] = 0; + break; + case "OPTIONAL": + case 1: + message[".google.api.fieldBehavior"][i] = 1; + break; + case "REQUIRED": + case 2: + message[".google.api.fieldBehavior"][i] = 2; + break; + case "OUTPUT_ONLY": + case 3: + message[".google.api.fieldBehavior"][i] = 3; + break; + case "INPUT_ONLY": + case 4: + message[".google.api.fieldBehavior"][i] = 4; + break; + case "IMMUTABLE": + case 5: + message[".google.api.fieldBehavior"][i] = 5; + break; + case "UNORDERED_LIST": + case 6: + message[".google.api.fieldBehavior"][i] = 6; + break; + case "NON_EMPTY_DEFAULT": + case 7: + message[".google.api.fieldBehavior"][i] = 7; + break; + } + } + if (object[".google.api.resourceReference"] != null) { + if (typeof object[".google.api.resourceReference"] !== "object") + throw TypeError(".google.protobuf.FieldOptions..google.api.resourceReference: object expected"); + message[".google.api.resourceReference"] = $root.google.api.ResourceReference.fromObject(object[".google.api.resourceReference"]); + } + return message; + }; + + /** + * Creates a plain object from a FieldOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FieldOptions + * @static + * @param {google.protobuf.FieldOptions} message FieldOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.fieldBehavior"] = []; + } + if (options.defaults) { + object.ctype = options.enums === String ? "STRING" : 0; + object.packed = false; + object.deprecated = false; + object.lazy = false; + object.jstype = options.enums === String ? "JS_NORMAL" : 0; + object.weak = false; + object.unverifiedLazy = false; + object[".google.api.resourceReference"] = null; + object[".google.cloud.bigquery.storage.v1.columnName"] = null; + } + if (message.ctype != null && message.hasOwnProperty("ctype")) + object.ctype = options.enums === String ? $root.google.protobuf.FieldOptions.CType[message.ctype] === undefined ? message.ctype : $root.google.protobuf.FieldOptions.CType[message.ctype] : message.ctype; + if (message.packed != null && message.hasOwnProperty("packed")) + object.packed = message.packed; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.lazy != null && message.hasOwnProperty("lazy")) + object.lazy = message.lazy; + if (message.jstype != null && message.hasOwnProperty("jstype")) + object.jstype = options.enums === String ? $root.google.protobuf.FieldOptions.JSType[message.jstype] === undefined ? 
message.jstype : $root.google.protobuf.FieldOptions.JSType[message.jstype] : message.jstype; + if (message.weak != null && message.hasOwnProperty("weak")) + object.weak = message.weak; + if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) + object.unverifiedLazy = message.unverifiedLazy; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.fieldBehavior"] && message[".google.api.fieldBehavior"].length) { + object[".google.api.fieldBehavior"] = []; + for (var j = 0; j < message[".google.api.fieldBehavior"].length; ++j) + object[".google.api.fieldBehavior"][j] = options.enums === String ? $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] === undefined ? message[".google.api.fieldBehavior"][j] : $root.google.api.FieldBehavior[message[".google.api.fieldBehavior"][j]] : message[".google.api.fieldBehavior"][j]; + } + if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + object[".google.api.resourceReference"] = $root.google.api.ResourceReference.toObject(message[".google.api.resourceReference"], options); + if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && message.hasOwnProperty(".google.cloud.bigquery.storage.v1.columnName")) + object[".google.cloud.bigquery.storage.v1.columnName"] = message[".google.cloud.bigquery.storage.v1.columnName"]; + return object; + }; + + /** + * Converts this FieldOptions to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FieldOptions + * @instance + * @returns {Object.} JSON object + */ + FieldOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldOptions + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions"; + }; + + /** + * CType enum. + * @name google.protobuf.FieldOptions.CType + * @enum {number} + * @property {number} STRING=0 STRING value + * @property {number} CORD=1 CORD value + * @property {number} STRING_PIECE=2 STRING_PIECE value + */ + FieldOptions.CType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STRING"] = 0; + values[valuesById[1] = "CORD"] = 1; + values[valuesById[2] = "STRING_PIECE"] = 2; + return values; + })(); + + /** + * JSType enum. + * @name google.protobuf.FieldOptions.JSType + * @enum {number} + * @property {number} JS_NORMAL=0 JS_NORMAL value + * @property {number} JS_STRING=1 JS_STRING value + * @property {number} JS_NUMBER=2 JS_NUMBER value + */ + FieldOptions.JSType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "JS_NORMAL"] = 0; + values[valuesById[1] = "JS_STRING"] = 1; + values[valuesById[2] = "JS_NUMBER"] = 2; + return values; + })(); + + return FieldOptions; + })(); + + protobuf.OneofOptions = (function() { + + /** + * Properties of an OneofOptions. 
+ * @memberof google.protobuf + * @interface IOneofOptions + * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption + */ + + /** + * Constructs a new OneofOptions. + * @memberof google.protobuf + * @classdesc Represents an OneofOptions. + * @implements IOneofOptions + * @constructor + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + */ + function OneofOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * OneofOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.OneofOptions + * @instance + */ + OneofOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + * @returns {google.protobuf.OneofOptions} OneofOptions instance + */ + OneofOptions.create = function create(properties) { + return new OneofOptions(properties); + }; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.OneofOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofOptions} OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an OneofOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.OneofOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.OneofOptions} OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an OneofOptions message. + * @function verify + * @memberof google.protobuf.OneofOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + OneofOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." 
+ error; + } + } + return null; + }; + + /** + * Creates an OneofOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.OneofOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.OneofOptions} OneofOptions + */ + OneofOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.OneofOptions) + return object; + var message = new $root.google.protobuf.OneofOptions(); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an OneofOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.OneofOptions} message OneofOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + OneofOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this OneofOptions to JSON. + * @function toJSON + * @memberof google.protobuf.OneofOptions + * @instance + * @returns {Object.} JSON object + */ + OneofOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for OneofOptions + * @function getTypeUrl + * @memberof google.protobuf.OneofOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + OneofOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.OneofOptions"; + }; + + return OneofOptions; + })(); + + protobuf.EnumOptions = (function() { + + /** + * Properties of an EnumOptions. + * @memberof google.protobuf + * @interface IEnumOptions + * @property {boolean|null} [allowAlias] EnumOptions allowAlias + * @property {boolean|null} [deprecated] EnumOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption + */ + + /** + * Constructs a new EnumOptions. 
+ * @memberof google.protobuf + * @classdesc Represents an EnumOptions. + * @implements IEnumOptions + * @constructor + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + */ + function EnumOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumOptions allowAlias. + * @member {boolean} allowAlias + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.allowAlias = false; + + /** + * EnumOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.deprecated = false; + + /** + * EnumOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new EnumOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumOptions} EnumOptions instance + */ + EnumOptions.create = function create(properties) { + return new EnumOptions(properties); + }; + + /** + * Encodes the specified EnumOptions message. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.IEnumOptions} message EnumOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + message.allowAlias = reader.bool(); + break; + } + case 3: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumOptions} EnumOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumOptions message. 
+ * @function verify + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + if (typeof message.allowAlias !== "boolean") + return "allowAlias: boolean expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an EnumOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumOptions} EnumOptions + */ + EnumOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumOptions) + return object; + var message = new $root.google.protobuf.EnumOptions(); + if (object.allowAlias != null) + message.allowAlias = Boolean(object.allowAlias); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an EnumOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumOptions + * @static + * @param {google.protobuf.EnumOptions} message EnumOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.allowAlias = false; + object.deprecated = false; + } + if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + object.allowAlias = message.allowAlias; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this EnumOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumOptions + * @instance + * @returns {Object.} JSON object + */ + EnumOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EnumOptions + * @function getTypeUrl + * @memberof google.protobuf.EnumOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumOptions"; + }; + + return EnumOptions; + })(); + + protobuf.EnumValueOptions = (function() { + + /** + * Properties of an EnumValueOptions. 
+ * @memberof google.protobuf + * @interface IEnumValueOptions + * @property {boolean|null} [deprecated] EnumValueOptions deprecated + * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption + */ + + /** + * Constructs a new EnumValueOptions. + * @memberof google.protobuf + * @classdesc Represents an EnumValueOptions. + * @implements IEnumValueOptions + * @constructor + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + */ + function EnumValueOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EnumValueOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.deprecated = false; + + /** + * EnumValueOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new EnumValueOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance + */ + EnumValueOptions.create = function create(properties) { + return new EnumValueOptions(properties); + }; + + /** + * Encodes the specified EnumValueOptions message. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified EnumValueOptions message, length delimited. Does not implicitly {@link google.protobuf.EnumValueOptions.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions} message EnumValueOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EnumValueOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EnumValueOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EnumValueOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EnumValueOptions message. 
+ * @function verify + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EnumValueOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + return null; + }; + + /** + * Creates an EnumValueOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.EnumValueOptions} EnumValueOptions + */ + EnumValueOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.EnumValueOptions) + return object; + var message = new $root.google.protobuf.EnumValueOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + return message; + }; + + /** + * Creates a plain object from an EnumValueOptions message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.EnumValueOptions} message EnumValueOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EnumValueOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) + object.deprecated = false; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + return object; + }; + + /** + * Converts this EnumValueOptions to JSON. + * @function toJSON + * @memberof google.protobuf.EnumValueOptions + * @instance + * @returns {Object.} JSON object + */ + EnumValueOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EnumValueOptions + * @function getTypeUrl + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EnumValueOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.EnumValueOptions"; + }; + + return EnumValueOptions; + })(); + + protobuf.ServiceOptions = (function() { + + /** + * Properties of a ServiceOptions. 
+ * @memberof google.protobuf + * @interface IServiceOptions + * @property {boolean|null} [deprecated] ServiceOptions deprecated + * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption + * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost + * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes + */ + + /** + * Constructs a new ServiceOptions. + * @memberof google.protobuf + * @classdesc Represents a ServiceOptions. + * @implements IServiceOptions + * @constructor + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + */ + function ServiceOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ServiceOptions deprecated. + * @member {boolean} deprecated + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.deprecated = false; + + /** + * ServiceOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * ServiceOptions .google.api.defaultHost. + * @member {string} .google.api.defaultHost + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.defaultHost"] = ""; + + /** + * ServiceOptions .google.api.oauthScopes. + * @member {string} .google.api.oauthScopes + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.oauthScopes"] = ""; + + /** + * Creates a new ServiceOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions=} [properties] Properties to set + * @returns {google.protobuf.ServiceOptions} ServiceOptions instance + */ + ServiceOptions.create = function create(properties) { + return new ServiceOptions(properties); + }; + + /** + * Encodes the specified ServiceOptions message. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) + writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); + if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) + writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); + return writer; + }; + + /** + * Encodes the specified ServiceOptions message, length delimited. Does not implicitly {@link google.protobuf.ServiceOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.IServiceOptions} message ServiceOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ServiceOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.ServiceOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 33: { + message.deprecated = reader.bool(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 1049: { + message[".google.api.defaultHost"] = reader.string(); + break; + } + case 1050: { + message[".google.api.oauthScopes"] = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ServiceOptions message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.ServiceOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ServiceOptions} ServiceOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ServiceOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ServiceOptions message. + * @function verify + * @memberof google.protobuf.ServiceOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ServiceOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." 
+ error; + } + } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + if (!$util.isString(message[".google.api.defaultHost"])) + return ".google.api.defaultHost: string expected"; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + if (!$util.isString(message[".google.api.oauthScopes"])) + return ".google.api.oauthScopes: string expected"; + return null; + }; + + /** + * Creates a ServiceOptions message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.ServiceOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ServiceOptions} ServiceOptions + */ + ServiceOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ServiceOptions) + return object; + var message = new $root.google.protobuf.ServiceOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.ServiceOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.defaultHost"] != null) + message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); + if (object[".google.api.oauthScopes"] != null) + message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); + return message; + }; + + /** + * Creates a plain object from a ServiceOptions message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ServiceOptions + * @static + * @param {google.protobuf.ServiceOptions} message ServiceOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ServiceOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.uninterpretedOption = []; + if (options.defaults) { + object.deprecated = false; + object[".google.api.defaultHost"] = ""; + object[".google.api.oauthScopes"] = ""; + } + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + object[".google.api.defaultHost"] = message[".google.api.defaultHost"]; + if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; + return object; + }; + + /** + * Converts this ServiceOptions to JSON. 
+ * @function toJSON + * @memberof google.protobuf.ServiceOptions + * @instance + * @returns {Object.} JSON object + */ + ServiceOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ServiceOptions + * @function getTypeUrl + * @memberof google.protobuf.ServiceOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ServiceOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ServiceOptions"; + }; + + return ServiceOptions; + })(); + + protobuf.MethodOptions = (function() { + + /** + * Properties of a MethodOptions. + * @memberof google.protobuf + * @interface IMethodOptions + * @property {boolean|null} [deprecated] MethodOptions deprecated + * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel + * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption + * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http + * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature + */ + + /** + * Constructs a new MethodOptions. + * @memberof google.protobuf + * @classdesc Represents a MethodOptions. + * @implements IMethodOptions + * @constructor + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + */ + function MethodOptions(properties) { + this.uninterpretedOption = []; + this[".google.api.methodSignature"] = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MethodOptions deprecated. 
+ * @member {boolean} deprecated + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.deprecated = false; + + /** + * MethodOptions idempotencyLevel. + * @member {google.protobuf.MethodOptions.IdempotencyLevel} idempotencyLevel + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.idempotencyLevel = 0; + + /** + * MethodOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * MethodOptions .google.api.http. + * @member {google.api.IHttpRule|null|undefined} .google.api.http + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype[".google.api.http"] = null; + + /** + * MethodOptions .google.api.methodSignature. + * @member {Array.} .google.api.methodSignature + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype[".google.api.methodSignature"] = $util.emptyArray; + + /** + * Creates a new MethodOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions=} [properties] Properties to set + * @returns {google.protobuf.MethodOptions} MethodOptions instance + */ + MethodOptions.create = function create(properties) { + return new MethodOptions(properties); + }; + + /** + * Encodes the specified MethodOptions message. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) + writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) + writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + if (message[".google.api.methodSignature"] != null && message[".google.api.methodSignature"].length) + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); + if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) + $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MethodOptions message, length delimited. Does not implicitly {@link google.protobuf.MethodOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.IMethodOptions} message MethodOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MethodOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.MethodOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.MethodOptions} MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodOptions.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 33: { + message.deprecated = reader.bool(); + break; + } + case 34: { + message.idempotencyLevel = reader.int32(); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + message.uninterpretedOption = []; + message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); + break; + } + case 72295728: { + message[".google.api.http"] = $root.google.api.HttpRule.decode(reader, reader.uint32()); + break; + } + case 1051: { + if (!(message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length)) + message[".google.api.methodSignature"] = []; + message[".google.api.methodSignature"].push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MethodOptions message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.MethodOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.MethodOptions} MethodOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodOptions.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MethodOptions message. 
+ * @function verify + * @memberof google.protobuf.MethodOptions + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MethodOptions.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (typeof message.deprecated !== "boolean") + return "deprecated: boolean expected"; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + switch (message.idempotencyLevel) { + default: + return "idempotencyLevel: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { + if (!Array.isArray(message.uninterpretedOption)) + return "uninterpretedOption: array expected"; + for (var i = 0; i < message.uninterpretedOption.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.verify(message.uninterpretedOption[i]); + if (error) + return "uninterpretedOption." + error; + } + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) { + var error = $root.google.api.HttpRule.verify(message[".google.api.http"]); + if (error) + return ".google.api.http." + error; + } + if (message[".google.api.methodSignature"] != null && message.hasOwnProperty(".google.api.methodSignature")) { + if (!Array.isArray(message[".google.api.methodSignature"])) + return ".google.api.methodSignature: array expected"; + for (var i = 0; i < message[".google.api.methodSignature"].length; ++i) + if (!$util.isString(message[".google.api.methodSignature"][i])) + return ".google.api.methodSignature: string[] expected"; + } + return null; + }; + + /** + * Creates a MethodOptions message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.MethodOptions + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.MethodOptions} MethodOptions + */ + MethodOptions.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.MethodOptions) + return object; + var message = new $root.google.protobuf.MethodOptions(); + if (object.deprecated != null) + message.deprecated = Boolean(object.deprecated); + switch (object.idempotencyLevel) { + default: + if (typeof object.idempotencyLevel === "number") { + message.idempotencyLevel = object.idempotencyLevel; + break; + } + break; + case "IDEMPOTENCY_UNKNOWN": + case 0: + message.idempotencyLevel = 0; + break; + case "NO_SIDE_EFFECTS": + case 1: + message.idempotencyLevel = 1; + break; + case "IDEMPOTENT": + case 2: + message.idempotencyLevel = 2; + break; + } + if (object.uninterpretedOption) { + if (!Array.isArray(object.uninterpretedOption)) + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); + message.uninterpretedOption = []; + for (var i = 0; i < object.uninterpretedOption.length; ++i) { + if (typeof object.uninterpretedOption[i] !== "object") + throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: object expected"); + message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); + } + } + if (object[".google.api.http"] != null) { + if (typeof object[".google.api.http"] !== "object") + throw TypeError(".google.protobuf.MethodOptions..google.api.http: object expected"); + message[".google.api.http"] = $root.google.api.HttpRule.fromObject(object[".google.api.http"]); + } + if (object[".google.api.methodSignature"]) { + if (!Array.isArray(object[".google.api.methodSignature"])) + throw TypeError(".google.protobuf.MethodOptions..google.api.methodSignature: array expected"); + message[".google.api.methodSignature"] = []; + for (var i = 0; i < 
object[".google.api.methodSignature"].length; ++i) + message[".google.api.methodSignature"][i] = String(object[".google.api.methodSignature"][i]); + } + return message; + }; + + /** + * Creates a plain object from a MethodOptions message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.MethodOptions + * @static + * @param {google.protobuf.MethodOptions} message MethodOptions + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MethodOptions.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.uninterpretedOption = []; + object[".google.api.methodSignature"] = []; + } + if (options.defaults) { + object.deprecated = false; + object.idempotencyLevel = options.enums === String ? "IDEMPOTENCY_UNKNOWN" : 0; + object[".google.api.http"] = null; + } + if (message.deprecated != null && message.hasOwnProperty("deprecated")) + object.deprecated = message.deprecated; + if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + object.idempotencyLevel = options.enums === String ? $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] === undefined ? 
message.idempotencyLevel : $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; + if (message.uninterpretedOption && message.uninterpretedOption.length) { + object.uninterpretedOption = []; + for (var j = 0; j < message.uninterpretedOption.length; ++j) + object.uninterpretedOption[j] = $root.google.protobuf.UninterpretedOption.toObject(message.uninterpretedOption[j], options); + } + if (message[".google.api.methodSignature"] && message[".google.api.methodSignature"].length) { + object[".google.api.methodSignature"] = []; + for (var j = 0; j < message[".google.api.methodSignature"].length; ++j) + object[".google.api.methodSignature"][j] = message[".google.api.methodSignature"][j]; + } + if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + object[".google.api.http"] = $root.google.api.HttpRule.toObject(message[".google.api.http"], options); + return object; + }; + + /** + * Converts this MethodOptions to JSON. + * @function toJSON + * @memberof google.protobuf.MethodOptions + * @instance + * @returns {Object.} JSON object + */ + MethodOptions.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MethodOptions + * @function getTypeUrl + * @memberof google.protobuf.MethodOptions + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MethodOptions.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.MethodOptions"; + }; + + /** + * IdempotencyLevel enum. 
+ * @name google.protobuf.MethodOptions.IdempotencyLevel + * @enum {number} + * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value + * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value + * @property {number} IDEMPOTENT=2 IDEMPOTENT value + */ + MethodOptions.IdempotencyLevel = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "IDEMPOTENCY_UNKNOWN"] = 0; + values[valuesById[1] = "NO_SIDE_EFFECTS"] = 1; + values[valuesById[2] = "IDEMPOTENT"] = 2; + return values; + })(); + + return MethodOptions; + })(); + + protobuf.UninterpretedOption = (function() { + + /** + * Properties of an UninterpretedOption. + * @memberof google.protobuf + * @interface IUninterpretedOption + * @property {Array.|null} [name] UninterpretedOption name + * @property {string|null} [identifierValue] UninterpretedOption identifierValue + * @property {number|Long|null} [positiveIntValue] UninterpretedOption positiveIntValue + * @property {number|Long|null} [negativeIntValue] UninterpretedOption negativeIntValue + * @property {number|null} [doubleValue] UninterpretedOption doubleValue + * @property {Uint8Array|null} [stringValue] UninterpretedOption stringValue + * @property {string|null} [aggregateValue] UninterpretedOption aggregateValue + */ + + /** + * Constructs a new UninterpretedOption. + * @memberof google.protobuf + * @classdesc Represents an UninterpretedOption. + * @implements IUninterpretedOption + * @constructor + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + */ + function UninterpretedOption(properties) { + this.name = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UninterpretedOption name. 
+ * @member {Array.} name + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.name = $util.emptyArray; + + /** + * UninterpretedOption identifierValue. + * @member {string} identifierValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.identifierValue = ""; + + /** + * UninterpretedOption positiveIntValue. + * @member {number|Long} positiveIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.positiveIntValue = $util.Long ? $util.Long.fromBits(0,0,true) : 0; + + /** + * UninterpretedOption negativeIntValue. + * @member {number|Long} negativeIntValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.negativeIntValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * UninterpretedOption doubleValue. + * @member {number} doubleValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.doubleValue = 0; + + /** + * UninterpretedOption stringValue. + * @member {Uint8Array} stringValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.stringValue = $util.newBuffer([]); + + /** + * UninterpretedOption aggregateValue. + * @member {string} aggregateValue + * @memberof google.protobuf.UninterpretedOption + * @instance + */ + UninterpretedOption.prototype.aggregateValue = ""; + + /** + * Creates a new UninterpretedOption instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption instance + */ + UninterpretedOption.create = function create(properties) { + return new UninterpretedOption(properties); + }; + + /** + * Encodes the specified UninterpretedOption message. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UninterpretedOption.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && message.name.length) + for (var i = 0; i < message.name.length; ++i) + $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); + if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) + writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); + if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) + writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); + if (message.doubleValue != null && Object.hasOwnProperty.call(message, "doubleValue")) + writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) + writer.uint32(/* id 7, wireType 2 
=*/58).bytes(message.stringValue); + if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); + return writer; + }; + + /** + * Encodes the specified UninterpretedOption message, length delimited. Does not implicitly {@link google.protobuf.UninterpretedOption.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.IUninterpretedOption} message UninterpretedOption message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UninterpretedOption.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UninterpretedOption.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (!(message.name && message.name.length)) + message.name = []; + message.name.push($root.google.protobuf.UninterpretedOption.NamePart.decode(reader, reader.uint32())); + break; + } + case 3: { + message.identifierValue = reader.string(); + break; + } + case 4: { + message.positiveIntValue = reader.uint64(); + break; + } + case 5: { + message.negativeIntValue = reader.int64(); + break; + } + case 6: { + message.doubleValue = reader.double(); + break; + } + case 7: { + message.stringValue = reader.bytes(); + break; + } + case 8: { + message.aggregateValue = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an UninterpretedOption message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UninterpretedOption.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an UninterpretedOption message. 
+ * @function verify + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UninterpretedOption.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) { + if (!Array.isArray(message.name)) + return "name: array expected"; + for (var i = 0; i < message.name.length; ++i) { + var error = $root.google.protobuf.UninterpretedOption.NamePart.verify(message.name[i]); + if (error) + return "name." + error; + } + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + if (!$util.isString(message.identifierValue)) + return "identifierValue: string expected"; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (!$util.isInteger(message.positiveIntValue) && !(message.positiveIntValue && $util.isInteger(message.positiveIntValue.low) && $util.isInteger(message.positiveIntValue.high))) + return "positiveIntValue: integer|Long expected"; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (!$util.isInteger(message.negativeIntValue) && !(message.negativeIntValue && $util.isInteger(message.negativeIntValue.low) && $util.isInteger(message.negativeIntValue.high))) + return "negativeIntValue: integer|Long expected"; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + if (typeof message.doubleValue !== "number") + return "doubleValue: number expected"; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (!(message.stringValue && typeof message.stringValue.length === "number" || $util.isString(message.stringValue))) + return "stringValue: buffer expected"; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + if 
(!$util.isString(message.aggregateValue)) + return "aggregateValue: string expected"; + return null; + }; + + /** + * Creates an UninterpretedOption message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption} UninterpretedOption + */ + UninterpretedOption.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption) + return object; + var message = new $root.google.protobuf.UninterpretedOption(); + if (object.name) { + if (!Array.isArray(object.name)) + throw TypeError(".google.protobuf.UninterpretedOption.name: array expected"); + message.name = []; + for (var i = 0; i < object.name.length; ++i) { + if (typeof object.name[i] !== "object") + throw TypeError(".google.protobuf.UninterpretedOption.name: object expected"); + message.name[i] = $root.google.protobuf.UninterpretedOption.NamePart.fromObject(object.name[i]); + } + } + if (object.identifierValue != null) + message.identifierValue = String(object.identifierValue); + if (object.positiveIntValue != null) + if ($util.Long) + (message.positiveIntValue = $util.Long.fromValue(object.positiveIntValue)).unsigned = true; + else if (typeof object.positiveIntValue === "string") + message.positiveIntValue = parseInt(object.positiveIntValue, 10); + else if (typeof object.positiveIntValue === "number") + message.positiveIntValue = object.positiveIntValue; + else if (typeof object.positiveIntValue === "object") + message.positiveIntValue = new $util.LongBits(object.positiveIntValue.low >>> 0, object.positiveIntValue.high >>> 0).toNumber(true); + if (object.negativeIntValue != null) + if ($util.Long) + (message.negativeIntValue = $util.Long.fromValue(object.negativeIntValue)).unsigned = false; + else if (typeof object.negativeIntValue === "string") + message.negativeIntValue = 
parseInt(object.negativeIntValue, 10); + else if (typeof object.negativeIntValue === "number") + message.negativeIntValue = object.negativeIntValue; + else if (typeof object.negativeIntValue === "object") + message.negativeIntValue = new $util.LongBits(object.negativeIntValue.low >>> 0, object.negativeIntValue.high >>> 0).toNumber(); + if (object.doubleValue != null) + message.doubleValue = Number(object.doubleValue); + if (object.stringValue != null) + if (typeof object.stringValue === "string") + $util.base64.decode(object.stringValue, message.stringValue = $util.newBuffer($util.base64.length(object.stringValue)), 0); + else if (object.stringValue.length >= 0) + message.stringValue = object.stringValue; + if (object.aggregateValue != null) + message.aggregateValue = String(object.aggregateValue); + return message; + }; + + /** + * Creates a plain object from an UninterpretedOption message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {google.protobuf.UninterpretedOption} message UninterpretedOption + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UninterpretedOption.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.name = []; + if (options.defaults) { + object.identifierValue = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, true); + object.positiveIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.positiveIntValue = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.negativeIntValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.negativeIntValue = options.longs === String ? 
"0" : 0; + object.doubleValue = 0; + if (options.bytes === String) + object.stringValue = ""; + else { + object.stringValue = []; + if (options.bytes !== Array) + object.stringValue = $util.newBuffer(object.stringValue); + } + object.aggregateValue = ""; + } + if (message.name && message.name.length) { + object.name = []; + for (var j = 0; j < message.name.length; ++j) + object.name[j] = $root.google.protobuf.UninterpretedOption.NamePart.toObject(message.name[j], options); + } + if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + object.identifierValue = message.identifierValue; + if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (typeof message.positiveIntValue === "number") + object.positiveIntValue = options.longs === String ? String(message.positiveIntValue) : message.positiveIntValue; + else + object.positiveIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.positiveIntValue) : options.longs === Number ? new $util.LongBits(message.positiveIntValue.low >>> 0, message.positiveIntValue.high >>> 0).toNumber(true) : message.positiveIntValue; + if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (typeof message.negativeIntValue === "number") + object.negativeIntValue = options.longs === String ? String(message.negativeIntValue) : message.negativeIntValue; + else + object.negativeIntValue = options.longs === String ? $util.Long.prototype.toString.call(message.negativeIntValue) : options.longs === Number ? new $util.LongBits(message.negativeIntValue.low >>> 0, message.negativeIntValue.high >>> 0).toNumber() : message.negativeIntValue; + if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + object.doubleValue = options.json && !isFinite(message.doubleValue) ? 
String(message.doubleValue) : message.doubleValue; + if (message.stringValue != null && message.hasOwnProperty("stringValue")) + object.stringValue = options.bytes === String ? $util.base64.encode(message.stringValue, 0, message.stringValue.length) : options.bytes === Array ? Array.prototype.slice.call(message.stringValue) : message.stringValue; + if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + object.aggregateValue = message.aggregateValue; + return object; + }; + + /** + * Converts this UninterpretedOption to JSON. + * @function toJSON + * @memberof google.protobuf.UninterpretedOption + * @instance + * @returns {Object.} JSON object + */ + UninterpretedOption.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for UninterpretedOption + * @function getTypeUrl + * @memberof google.protobuf.UninterpretedOption + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UninterpretedOption.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UninterpretedOption"; + }; + + UninterpretedOption.NamePart = (function() { + + /** + * Properties of a NamePart. + * @memberof google.protobuf.UninterpretedOption + * @interface INamePart + * @property {string} namePart NamePart namePart + * @property {boolean} isExtension NamePart isExtension + */ + + /** + * Constructs a new NamePart. + * @memberof google.protobuf.UninterpretedOption + * @classdesc Represents a NamePart. 
+ * @implements INamePart + * @constructor + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + */ + function NamePart(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * NamePart namePart. + * @member {string} namePart + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.namePart = ""; + + /** + * NamePart isExtension. + * @member {boolean} isExtension + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + */ + NamePart.prototype.isExtension = false; + + /** + * Creates a new NamePart instance using the specified properties. + * @function create + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart=} [properties] Properties to set + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart instance + */ + NamePart.create = function create(properties) { + return new NamePart(properties); + }; + + /** + * Encodes the specified NamePart message. Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + writer.uint32(/* id 1, wireType 2 =*/10).string(message.namePart); + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.isExtension); + return writer; + }; + + /** + * Encodes the specified NamePart message, length delimited. 
Does not implicitly {@link google.protobuf.UninterpretedOption.NamePart.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.INamePart} message NamePart message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NamePart.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a NamePart message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.namePart = reader.string(); + break; + } + case 2: { + message.isExtension = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + if (!message.hasOwnProperty("namePart")) + throw $util.ProtocolError("missing required 'namePart'", { instance: message }); + if (!message.hasOwnProperty("isExtension")) + throw $util.ProtocolError("missing required 'isExtension'", { instance: message }); + return message; + }; + + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a NamePart message. + * @function verify + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + NamePart.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (!$util.isString(message.namePart)) + return "namePart: string expected"; + if (typeof message.isExtension !== "boolean") + return "isExtension: boolean expected"; + return null; + }; + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + */ + NamePart.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) + return object; + var message = new $root.google.protobuf.UninterpretedOption.NamePart(); + if (object.namePart != null) + message.namePart = String(object.namePart); + if (object.isExtension != null) + message.isExtension = Boolean(object.isExtension); + return message; + }; + + /** + * Creates a plain object from a NamePart message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + NamePart.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.namePart = ""; + object.isExtension = false; + } + if (message.namePart != null && message.hasOwnProperty("namePart")) + object.namePart = message.namePart; + if (message.isExtension != null && message.hasOwnProperty("isExtension")) + object.isExtension = message.isExtension; + return object; + }; + + /** + * Converts this NamePart to JSON. + * @function toJSON + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + * @returns {Object.} JSON object + */ + NamePart.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for NamePart + * @function getTypeUrl + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + NamePart.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UninterpretedOption.NamePart"; + }; + + return NamePart; + })(); + + return UninterpretedOption; + })(); + + protobuf.SourceCodeInfo = (function() { + + /** + * Properties of a SourceCodeInfo. + * @memberof google.protobuf + * @interface ISourceCodeInfo + * @property {Array.|null} [location] SourceCodeInfo location + */ + + /** + * Constructs a new SourceCodeInfo. + * @memberof google.protobuf + * @classdesc Represents a SourceCodeInfo. 
+ * @implements ISourceCodeInfo + * @constructor + * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set + */ + function SourceCodeInfo(properties) { + this.location = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SourceCodeInfo location. + * @member {Array.} location + * @memberof google.protobuf.SourceCodeInfo + * @instance + */ + SourceCodeInfo.prototype.location = $util.emptyArray; + + /** + * Creates a new SourceCodeInfo instance using the specified properties. + * @function create + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.ISourceCodeInfo=} [properties] Properties to set + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo instance + */ + SourceCodeInfo.create = function create(properties) { + return new SourceCodeInfo(properties); + }; + + /** + * Encodes the specified SourceCodeInfo message. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. + * @function encode + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SourceCodeInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.location != null && message.location.length) + for (var i = 0; i < message.location.length; ++i) + $root.google.protobuf.SourceCodeInfo.Location.encode(message.location[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified SourceCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.ISourceCodeInfo} message SourceCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SourceCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SourceCodeInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.location && message.location.length)) + message.location = []; + message.location.push($root.google.protobuf.SourceCodeInfo.Location.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SourceCodeInfo message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SourceCodeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SourceCodeInfo message. + * @function verify + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SourceCodeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.location != null && message.hasOwnProperty("location")) { + if (!Array.isArray(message.location)) + return "location: array expected"; + for (var i = 0; i < message.location.length; ++i) { + var error = $root.google.protobuf.SourceCodeInfo.Location.verify(message.location[i]); + if (error) + return "location." + error; + } + } + return null; + }; + + /** + * Creates a SourceCodeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.SourceCodeInfo} SourceCodeInfo + */ + SourceCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo) + return object; + var message = new $root.google.protobuf.SourceCodeInfo(); + if (object.location) { + if (!Array.isArray(object.location)) + throw TypeError(".google.protobuf.SourceCodeInfo.location: array expected"); + message.location = []; + for (var i = 0; i < object.location.length; ++i) { + if (typeof object.location[i] !== "object") + throw TypeError(".google.protobuf.SourceCodeInfo.location: object expected"); + message.location[i] = $root.google.protobuf.SourceCodeInfo.Location.fromObject(object.location[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a SourceCodeInfo message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {google.protobuf.SourceCodeInfo} message SourceCodeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SourceCodeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.location = []; + if (message.location && message.location.length) { + object.location = []; + for (var j = 0; j < message.location.length; ++j) + object.location[j] = $root.google.protobuf.SourceCodeInfo.Location.toObject(message.location[j], options); + } + return object; + }; + + /** + * Converts this SourceCodeInfo to JSON. 
+ * @function toJSON + * @memberof google.protobuf.SourceCodeInfo + * @instance + * @returns {Object.} JSON object + */ + SourceCodeInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SourceCodeInfo + * @function getTypeUrl + * @memberof google.protobuf.SourceCodeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SourceCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.SourceCodeInfo"; + }; + + SourceCodeInfo.Location = (function() { + + /** + * Properties of a Location. + * @memberof google.protobuf.SourceCodeInfo + * @interface ILocation + * @property {Array.|null} [path] Location path + * @property {Array.|null} [span] Location span + * @property {string|null} [leadingComments] Location leadingComments + * @property {string|null} [trailingComments] Location trailingComments + * @property {Array.|null} [leadingDetachedComments] Location leadingDetachedComments + */ + + /** + * Constructs a new Location. + * @memberof google.protobuf.SourceCodeInfo + * @classdesc Represents a Location. + * @implements ILocation + * @constructor + * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set + */ + function Location(properties) { + this.path = []; + this.span = []; + this.leadingDetachedComments = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Location path. + * @member {Array.} path + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.path = $util.emptyArray; + + /** + * Location span. 
+ * @member {Array.} span + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.span = $util.emptyArray; + + /** + * Location leadingComments. + * @member {string} leadingComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.leadingComments = ""; + + /** + * Location trailingComments. + * @member {string} trailingComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.trailingComments = ""; + + /** + * Location leadingDetachedComments. + * @member {Array.} leadingDetachedComments + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + */ + Location.prototype.leadingDetachedComments = $util.emptyArray; + + /** + * Creates a new Location instance using the specified properties. + * @function create + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.ILocation=} [properties] Properties to set + * @returns {google.protobuf.SourceCodeInfo.Location} Location instance + */ + Location.create = function create(properties) { + return new Location(properties); + }; + + /** + * Encodes the specified Location message. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Location.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.path != null && message.path.length) { + writer.uint32(/* id 1, wireType 2 =*/10).fork(); + for (var i = 0; i < message.path.length; ++i) + writer.int32(message.path[i]); + writer.ldelim(); + } + if (message.span != null && message.span.length) { + writer.uint32(/* id 2, wireType 2 =*/18).fork(); + for (var i = 0; i < message.span.length; ++i) + writer.int32(message.span[i]); + writer.ldelim(); + } + if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); + if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); + if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.leadingDetachedComments[i]); + return writer; + }; + + /** + * Encodes the specified Location message, length delimited. Does not implicitly {@link google.protobuf.SourceCodeInfo.Location.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.ILocation} message Location message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Location.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Location message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.SourceCodeInfo.Location} Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Location.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); + break; + } + case 2: { + if (!(message.span && message.span.length)) + message.span = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.span.push(reader.int32()); + } else + message.span.push(reader.int32()); + break; + } + case 3: { + message.leadingComments = reader.string(); + break; + } + case 4: { + message.trailingComments = reader.string(); + break; + } + case 6: { + if (!(message.leadingDetachedComments && message.leadingDetachedComments.length)) + message.leadingDetachedComments = []; + message.leadingDetachedComments.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Location message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.SourceCodeInfo.Location} Location + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Location.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Location message. 
+ * @function verify + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Location.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; + } + if (message.span != null && message.hasOwnProperty("span")) { + if (!Array.isArray(message.span)) + return "span: array expected"; + for (var i = 0; i < message.span.length; ++i) + if (!$util.isInteger(message.span[i])) + return "span: integer[] expected"; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + if (!$util.isString(message.leadingComments)) + return "leadingComments: string expected"; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + if (!$util.isString(message.trailingComments)) + return "trailingComments: string expected"; + if (message.leadingDetachedComments != null && message.hasOwnProperty("leadingDetachedComments")) { + if (!Array.isArray(message.leadingDetachedComments)) + return "leadingDetachedComments: array expected"; + for (var i = 0; i < message.leadingDetachedComments.length; ++i) + if (!$util.isString(message.leadingDetachedComments[i])) + return "leadingDetachedComments: string[] expected"; + } + return null; + }; + + /** + * Creates a Location message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.SourceCodeInfo.Location} Location + */ + Location.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.SourceCodeInfo.Location) + return object; + var message = new $root.google.protobuf.SourceCodeInfo.Location(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.span) { + if (!Array.isArray(object.span)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.span: array expected"); + message.span = []; + for (var i = 0; i < object.span.length; ++i) + message.span[i] = object.span[i] | 0; + } + if (object.leadingComments != null) + message.leadingComments = String(object.leadingComments); + if (object.trailingComments != null) + message.trailingComments = String(object.trailingComments); + if (object.leadingDetachedComments) { + if (!Array.isArray(object.leadingDetachedComments)) + throw TypeError(".google.protobuf.SourceCodeInfo.Location.leadingDetachedComments: array expected"); + message.leadingDetachedComments = []; + for (var i = 0; i < object.leadingDetachedComments.length; ++i) + message.leadingDetachedComments[i] = String(object.leadingDetachedComments[i]); + } + return message; + }; + + /** + * Creates a plain object from a Location message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {google.protobuf.SourceCodeInfo.Location} message Location + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Location.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.path = []; + object.span = []; + object.leadingDetachedComments = []; + } + if (options.defaults) { + object.leadingComments = ""; + object.trailingComments = ""; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.span && message.span.length) { + object.span = []; + for (var j = 0; j < message.span.length; ++j) + object.span[j] = message.span[j]; + } + if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + object.leadingComments = message.leadingComments; + if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + object.trailingComments = message.trailingComments; + if (message.leadingDetachedComments && message.leadingDetachedComments.length) { + object.leadingDetachedComments = []; + for (var j = 0; j < message.leadingDetachedComments.length; ++j) + object.leadingDetachedComments[j] = message.leadingDetachedComments[j]; + } + return object; + }; + + /** + * Converts this Location to JSON. 
+ * @function toJSON + * @memberof google.protobuf.SourceCodeInfo.Location + * @instance + * @returns {Object.} JSON object + */ + Location.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Location + * @function getTypeUrl + * @memberof google.protobuf.SourceCodeInfo.Location + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Location.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.SourceCodeInfo.Location"; + }; + + return Location; + })(); + + return SourceCodeInfo; + })(); + + protobuf.GeneratedCodeInfo = (function() { + + /** + * Properties of a GeneratedCodeInfo. + * @memberof google.protobuf + * @interface IGeneratedCodeInfo + * @property {Array.|null} [annotation] GeneratedCodeInfo annotation + */ + + /** + * Constructs a new GeneratedCodeInfo. + * @memberof google.protobuf + * @classdesc Represents a GeneratedCodeInfo. + * @implements IGeneratedCodeInfo + * @constructor + * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set + */ + function GeneratedCodeInfo(properties) { + this.annotation = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GeneratedCodeInfo annotation. + * @member {Array.} annotation + * @memberof google.protobuf.GeneratedCodeInfo + * @instance + */ + GeneratedCodeInfo.prototype.annotation = $util.emptyArray; + + /** + * Creates a new GeneratedCodeInfo instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.IGeneratedCodeInfo=} [properties] Properties to set + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo instance + */ + GeneratedCodeInfo.create = function create(properties) { + return new GeneratedCodeInfo(properties); + }; + + /** + * Encodes the specified GeneratedCodeInfo message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @function encode + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GeneratedCodeInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.annotation != null && message.annotation.length) + for (var i = 0; i < message.annotation.length; ++i) + $root.google.protobuf.GeneratedCodeInfo.Annotation.encode(message.annotation[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified GeneratedCodeInfo message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.IGeneratedCodeInfo} message GeneratedCodeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GeneratedCodeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GeneratedCodeInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.annotation && message.annotation.length)) + message.annotation = []; + message.annotation.push($root.google.protobuf.GeneratedCodeInfo.Annotation.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a GeneratedCodeInfo message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GeneratedCodeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a GeneratedCodeInfo message. 
+ * @function verify + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GeneratedCodeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.annotation != null && message.hasOwnProperty("annotation")) { + if (!Array.isArray(message.annotation)) + return "annotation: array expected"; + for (var i = 0; i < message.annotation.length; ++i) { + var error = $root.google.protobuf.GeneratedCodeInfo.Annotation.verify(message.annotation[i]); + if (error) + return "annotation." + error; + } + } + return null; + }; + + /** + * Creates a GeneratedCodeInfo message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo} GeneratedCodeInfo + */ + GeneratedCodeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo(); + if (object.annotation) { + if (!Array.isArray(object.annotation)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: array expected"); + message.annotation = []; + for (var i = 0; i < object.annotation.length; ++i) { + if (typeof object.annotation[i] !== "object") + throw TypeError(".google.protobuf.GeneratedCodeInfo.annotation: object expected"); + message.annotation[i] = $root.google.protobuf.GeneratedCodeInfo.Annotation.fromObject(object.annotation[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a GeneratedCodeInfo message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {google.protobuf.GeneratedCodeInfo} message GeneratedCodeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + GeneratedCodeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.annotation = []; + if (message.annotation && message.annotation.length) { + object.annotation = []; + for (var j = 0; j < message.annotation.length; ++j) + object.annotation[j] = $root.google.protobuf.GeneratedCodeInfo.Annotation.toObject(message.annotation[j], options); + } + return object; + }; + + /** + * Converts this GeneratedCodeInfo to JSON. + * @function toJSON + * @memberof google.protobuf.GeneratedCodeInfo + * @instance + * @returns {Object.} JSON object + */ + GeneratedCodeInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for GeneratedCodeInfo + * @function getTypeUrl + * @memberof google.protobuf.GeneratedCodeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + GeneratedCodeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo"; + }; + + GeneratedCodeInfo.Annotation = (function() { + + /** + * Properties of an Annotation. 
+ * @memberof google.protobuf.GeneratedCodeInfo + * @interface IAnnotation + * @property {Array.|null} [path] Annotation path + * @property {string|null} [sourceFile] Annotation sourceFile + * @property {number|null} [begin] Annotation begin + * @property {number|null} [end] Annotation end + * @property {google.protobuf.GeneratedCodeInfo.Annotation.Semantic|null} [semantic] Annotation semantic + */ + + /** + * Constructs a new Annotation. + * @memberof google.protobuf.GeneratedCodeInfo + * @classdesc Represents an Annotation. + * @implements IAnnotation + * @constructor + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set + */ + function Annotation(properties) { + this.path = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Annotation path. + * @member {Array.} path + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.path = $util.emptyArray; + + /** + * Annotation sourceFile. + * @member {string} sourceFile + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.sourceFile = ""; + + /** + * Annotation begin. + * @member {number} begin + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.begin = 0; + + /** + * Annotation end. + * @member {number} end + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.end = 0; + + /** + * Annotation semantic. + * @member {google.protobuf.GeneratedCodeInfo.Annotation.Semantic} semantic + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + */ + Annotation.prototype.semantic = 0; + + /** + * Creates a new Annotation instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation=} [properties] Properties to set + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation instance + */ + Annotation.create = function create(properties) { + return new Annotation(properties); + }; + + /** + * Encodes the specified Annotation message. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. + * @function encode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.path != null && message.path.length) { + writer.uint32(/* id 1, wireType 2 =*/10).fork(); + for (var i = 0; i < message.path.length; ++i) + writer.int32(message.path[i]); + writer.ldelim(); + } + if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); + if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); + if (message.end != null && Object.hasOwnProperty.call(message, "end")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); + if (message.semantic != null && Object.hasOwnProperty.call(message, "semantic")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.semantic); + return writer; + }; + + /** + * Encodes the specified Annotation message, length delimited. Does not implicitly {@link google.protobuf.GeneratedCodeInfo.Annotation.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.IAnnotation} message Annotation message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Annotation.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Annotation message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.path && message.path.length)) + message.path = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.path.push(reader.int32()); + } else + message.path.push(reader.int32()); + break; + } + case 2: { + message.sourceFile = reader.string(); + break; + } + case 3: { + message.begin = reader.int32(); + break; + } + case 4: { + message.end = reader.int32(); + break; + } + case 5: { + message.semantic = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Annotation message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Annotation.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Annotation message. 
+ * @function verify + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Annotation.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.path != null && message.hasOwnProperty("path")) { + if (!Array.isArray(message.path)) + return "path: array expected"; + for (var i = 0; i < message.path.length; ++i) + if (!$util.isInteger(message.path[i])) + return "path: integer[] expected"; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + if (!$util.isString(message.sourceFile)) + return "sourceFile: string expected"; + if (message.begin != null && message.hasOwnProperty("begin")) + if (!$util.isInteger(message.begin)) + return "begin: integer expected"; + if (message.end != null && message.hasOwnProperty("end")) + if (!$util.isInteger(message.end)) + return "end: integer expected"; + if (message.semantic != null && message.hasOwnProperty("semantic")) + switch (message.semantic) { + default: + return "semantic: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Creates an Annotation message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.GeneratedCodeInfo.Annotation} Annotation + */ + Annotation.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.GeneratedCodeInfo.Annotation) + return object; + var message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); + if (object.path) { + if (!Array.isArray(object.path)) + throw TypeError(".google.protobuf.GeneratedCodeInfo.Annotation.path: array expected"); + message.path = []; + for (var i = 0; i < object.path.length; ++i) + message.path[i] = object.path[i] | 0; + } + if (object.sourceFile != null) + message.sourceFile = String(object.sourceFile); + if (object.begin != null) + message.begin = object.begin | 0; + if (object.end != null) + message.end = object.end | 0; + switch (object.semantic) { + default: + if (typeof object.semantic === "number") { + message.semantic = object.semantic; + break; + } + break; + case "NONE": + case 0: + message.semantic = 0; + break; + case "SET": + case 1: + message.semantic = 1; + break; + case "ALIAS": + case 2: + message.semantic = 2; + break; + } + return message; + }; + + /** + * Creates a plain object from an Annotation message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {google.protobuf.GeneratedCodeInfo.Annotation} message Annotation + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Annotation.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.path = []; + if (options.defaults) { + object.sourceFile = ""; + object.begin = 0; + object.end = 0; + object.semantic = options.enums === String ? 
"NONE" : 0; + } + if (message.path && message.path.length) { + object.path = []; + for (var j = 0; j < message.path.length; ++j) + object.path[j] = message.path[j]; + } + if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + object.sourceFile = message.sourceFile; + if (message.begin != null && message.hasOwnProperty("begin")) + object.begin = message.begin; + if (message.end != null && message.hasOwnProperty("end")) + object.end = message.end; + if (message.semantic != null && message.hasOwnProperty("semantic")) + object.semantic = options.enums === String ? $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] === undefined ? message.semantic : $root.google.protobuf.GeneratedCodeInfo.Annotation.Semantic[message.semantic] : message.semantic; + return object; + }; + + /** + * Converts this Annotation to JSON. + * @function toJSON + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @instance + * @returns {Object.} JSON object + */ + Annotation.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Annotation + * @function getTypeUrl + * @memberof google.protobuf.GeneratedCodeInfo.Annotation + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Annotation.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.GeneratedCodeInfo.Annotation"; + }; + + /** + * Semantic enum. 
+ * @name google.protobuf.GeneratedCodeInfo.Annotation.Semantic + * @enum {number} + * @property {number} NONE=0 NONE value + * @property {number} SET=1 SET value + * @property {number} ALIAS=2 ALIAS value + */ + Annotation.Semantic = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "NONE"] = 0; + values[valuesById[1] = "SET"] = 1; + values[valuesById[2] = "ALIAS"] = 2; + return values; + })(); + + return Annotation; + })(); + + return GeneratedCodeInfo; + })(); + + protobuf.Timestamp = (function() { + + /** + * Properties of a Timestamp. + * @memberof google.protobuf + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos + */ + + /** + * Constructs a new Timestamp. + * @memberof google.protobuf + * @classdesc Represents a Timestamp. + * @implements ITimestamp + * @constructor + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + */ + function Timestamp(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. + * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance + */ + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; + + /** + * Encodes the specified Timestamp message. 
Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. + * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? 
new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + + return Timestamp; + })(); + + protobuf.DoubleValue = (function() { + + /** + * Properties of a DoubleValue. + * @memberof google.protobuf + * @interface IDoubleValue + * @property {number|null} [value] DoubleValue value + */ + + /** + * Constructs a new DoubleValue. + * @memberof google.protobuf + * @classdesc Represents a DoubleValue. + * @implements IDoubleValue + * @constructor + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + */ + function DoubleValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DoubleValue value. + * @member {number} value + * @memberof google.protobuf.DoubleValue + * @instance + */ + DoubleValue.prototype.value = 0; + + /** + * Creates a new DoubleValue instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + * @returns {google.protobuf.DoubleValue} DoubleValue instance + */ + DoubleValue.create = function create(properties) { + return new DoubleValue(properties); + }; + + /** + * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DoubleValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); + return writer; + }; + + /** + * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DoubleValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DoubleValue message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DoubleValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DoubleValue} DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DoubleValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DoubleValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DoubleValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.DoubleValue} DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DoubleValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DoubleValue message. 
+ * @function verify + * @memberof google.protobuf.DoubleValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DoubleValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "number") + return "value: number expected"; + return null; + }; + + /** + * Creates a DoubleValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.DoubleValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.DoubleValue} DoubleValue + */ + DoubleValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.DoubleValue) + return object; + var message = new $root.google.protobuf.DoubleValue(); + if (object.value != null) + message.value = Number(object.value); + return message; + }; + + /** + * Creates a plain object from a DoubleValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.DoubleValue} message DoubleValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DoubleValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; + return object; + }; + + /** + * Converts this DoubleValue to JSON. 
+ * @function toJSON + * @memberof google.protobuf.DoubleValue + * @instance + * @returns {Object.} JSON object + */ + DoubleValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DoubleValue + * @function getTypeUrl + * @memberof google.protobuf.DoubleValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DoubleValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.DoubleValue"; + }; + + return DoubleValue; + })(); + + protobuf.FloatValue = (function() { + + /** + * Properties of a FloatValue. + * @memberof google.protobuf + * @interface IFloatValue + * @property {number|null} [value] FloatValue value + */ + + /** + * Constructs a new FloatValue. + * @memberof google.protobuf + * @classdesc Represents a FloatValue. + * @implements IFloatValue + * @constructor + * @param {google.protobuf.IFloatValue=} [properties] Properties to set + */ + function FloatValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FloatValue value. + * @member {number} value + * @memberof google.protobuf.FloatValue + * @instance + */ + FloatValue.prototype.value = 0; + + /** + * Creates a new FloatValue instance using the specified properties. + * @function create + * @memberof google.protobuf.FloatValue + * @static + * @param {google.protobuf.IFloatValue=} [properties] Properties to set + * @returns {google.protobuf.FloatValue} FloatValue instance + */ + FloatValue.create = function create(properties) { + return new FloatValue(properties); + }; + + /** + * Encodes the specified FloatValue message. 
Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FloatValue + * @static + * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FloatValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 5 =*/13).float(message.value); + return writer; + }; + + /** + * Encodes the specified FloatValue message, length delimited. Does not implicitly {@link google.protobuf.FloatValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FloatValue + * @static + * @param {google.protobuf.IFloatValue} message FloatValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FloatValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FloatValue message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FloatValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FloatValue} FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FloatValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FloatValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.float(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FloatValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FloatValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FloatValue} FloatValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FloatValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FloatValue message. + * @function verify + * @memberof google.protobuf.FloatValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FloatValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "number") + return "value: number expected"; + return null; + }; + + /** + * Creates a FloatValue message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FloatValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FloatValue} FloatValue + */ + FloatValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FloatValue) + return object; + var message = new $root.google.protobuf.FloatValue(); + if (object.value != null) + message.value = Number(object.value); + return message; + }; + + /** + * Creates a plain object from a FloatValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FloatValue + * @static + * @param {google.protobuf.FloatValue} message FloatValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FloatValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.json && !isFinite(message.value) ? String(message.value) : message.value; + return object; + }; + + /** + * Converts this FloatValue to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FloatValue + * @instance + * @returns {Object.} JSON object + */ + FloatValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FloatValue + * @function getTypeUrl + * @memberof google.protobuf.FloatValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FloatValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FloatValue"; + }; + + return FloatValue; + })(); + + protobuf.Int64Value = (function() { + + /** + * Properties of an Int64Value. + * @memberof google.protobuf + * @interface IInt64Value + * @property {number|Long|null} [value] Int64Value value + */ + + /** + * Constructs a new Int64Value. + * @memberof google.protobuf + * @classdesc Represents an Int64Value. + * @implements IInt64Value + * @constructor + * @param {google.protobuf.IInt64Value=} [properties] Properties to set + */ + function Int64Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Int64Value value. + * @member {number|Long} value + * @memberof google.protobuf.Int64Value + * @instance + */ + Int64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new Int64Value instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value=} [properties] Properties to set + * @returns {google.protobuf.Int64Value} Int64Value instance + */ + Int64Value.create = function create(properties) { + return new Int64Value(properties); + }; + + /** + * Encodes the specified Int64Value message. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int64Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.value); + return writer; + }; + + /** + * Encodes the specified Int64Value message, length delimited. Does not implicitly {@link google.protobuf.Int64Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.IInt64Value} message Int64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int64Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Int64Value message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.Int64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Int64Value} Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int64Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Int64Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Int64Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Int64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Int64Value} Int64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int64Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Int64Value message. 
+ * @function verify + * @memberof google.protobuf.Int64Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Int64Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) + return "value: integer|Long expected"; + return null; + }; + + /** + * Creates an Int64Value message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Int64Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Int64Value} Int64Value + */ + Int64Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Int64Value) + return object; + var message = new $root.google.protobuf.Int64Value(); + if (object.value != null) + if ($util.Long) + (message.value = $util.Long.fromValue(object.value)).unsigned = false; + else if (typeof object.value === "string") + message.value = parseInt(object.value, 10); + else if (typeof object.value === "number") + message.value = object.value; + else if (typeof object.value === "object") + message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from an Int64Value message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.Int64Value + * @static + * @param {google.protobuf.Int64Value} message Int64Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Int64Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.value = options.longs === String ? "0" : 0; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value === "number") + object.value = options.longs === String ? String(message.value) : message.value; + else + object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber() : message.value; + return object; + }; + + /** + * Converts this Int64Value to JSON. + * @function toJSON + * @memberof google.protobuf.Int64Value + * @instance + * @returns {Object.} JSON object + */ + Int64Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Int64Value + * @function getTypeUrl + * @memberof google.protobuf.Int64Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Int64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Int64Value"; + }; + + return Int64Value; + })(); + + protobuf.UInt64Value = (function() { + + /** + * Properties of a UInt64Value. 
+ * @memberof google.protobuf + * @interface IUInt64Value + * @property {number|Long|null} [value] UInt64Value value + */ + + /** + * Constructs a new UInt64Value. + * @memberof google.protobuf + * @classdesc Represents a UInt64Value. + * @implements IUInt64Value + * @constructor + * @param {google.protobuf.IUInt64Value=} [properties] Properties to set + */ + function UInt64Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UInt64Value value. + * @member {number|Long} value + * @memberof google.protobuf.UInt64Value + * @instance + */ + UInt64Value.prototype.value = $util.Long ? $util.Long.fromBits(0,0,true) : 0; + + /** + * Creates a new UInt64Value instance using the specified properties. + * @function create + * @memberof google.protobuf.UInt64Value + * @static + * @param {google.protobuf.IUInt64Value=} [properties] Properties to set + * @returns {google.protobuf.UInt64Value} UInt64Value instance + */ + UInt64Value.create = function create(properties) { + return new UInt64Value(properties); + }; + + /** + * Encodes the specified UInt64Value message. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UInt64Value + * @static + * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UInt64Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).uint64(message.value); + return writer; + }; + + /** + * Encodes the specified UInt64Value message, length delimited. Does not implicitly {@link google.protobuf.UInt64Value.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.UInt64Value + * @static + * @param {google.protobuf.IUInt64Value} message UInt64Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UInt64Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a UInt64Value message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UInt64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UInt64Value} UInt64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UInt64Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UInt64Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.uint64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a UInt64Value message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.UInt64Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UInt64Value} UInt64Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UInt64Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a UInt64Value message. + * @function verify + * @memberof google.protobuf.UInt64Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UInt64Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value) && !(message.value && $util.isInteger(message.value.low) && $util.isInteger(message.value.high))) + return "value: integer|Long expected"; + return null; + }; + + /** + * Creates a UInt64Value message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.UInt64Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UInt64Value} UInt64Value + */ + UInt64Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UInt64Value) + return object; + var message = new $root.google.protobuf.UInt64Value(); + if (object.value != null) + if ($util.Long) + (message.value = $util.Long.fromValue(object.value)).unsigned = true; + else if (typeof object.value === "string") + message.value = parseInt(object.value, 10); + else if (typeof object.value === "number") + message.value = object.value; + else if (typeof object.value === "object") + message.value = new $util.LongBits(object.value.low >>> 0, object.value.high >>> 0).toNumber(true); + return message; + }; + + /** + * Creates a plain object from a UInt64Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UInt64Value + * @static + * @param {google.protobuf.UInt64Value} message UInt64Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UInt64Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if ($util.Long) { + var long = new $util.Long(0, 0, true); + object.value = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.value = options.longs === String ? "0" : 0; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value === "number") + object.value = options.longs === String ? String(message.value) : message.value; + else + object.value = options.longs === String ? $util.Long.prototype.toString.call(message.value) : options.longs === Number ? 
new $util.LongBits(message.value.low >>> 0, message.value.high >>> 0).toNumber(true) : message.value; + return object; + }; + + /** + * Converts this UInt64Value to JSON. + * @function toJSON + * @memberof google.protobuf.UInt64Value + * @instance + * @returns {Object.} JSON object + */ + UInt64Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for UInt64Value + * @function getTypeUrl + * @memberof google.protobuf.UInt64Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UInt64Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UInt64Value"; + }; + + return UInt64Value; + })(); + + protobuf.Int32Value = (function() { + + /** + * Properties of an Int32Value. + * @memberof google.protobuf + * @interface IInt32Value + * @property {number|null} [value] Int32Value value + */ + + /** + * Constructs a new Int32Value. + * @memberof google.protobuf + * @classdesc Represents an Int32Value. + * @implements IInt32Value + * @constructor + * @param {google.protobuf.IInt32Value=} [properties] Properties to set + */ + function Int32Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Int32Value value. + * @member {number} value + * @memberof google.protobuf.Int32Value + * @instance + */ + Int32Value.prototype.value = 0; + + /** + * Creates a new Int32Value instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.Int32Value + * @static + * @param {google.protobuf.IInt32Value=} [properties] Properties to set + * @returns {google.protobuf.Int32Value} Int32Value instance + */ + Int32Value.create = function create(properties) { + return new Int32Value(properties); + }; + + /** + * Encodes the specified Int32Value message. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Int32Value + * @static + * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int32Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.value); + return writer; + }; + + /** + * Encodes the specified Int32Value message, length delimited. Does not implicitly {@link google.protobuf.Int32Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Int32Value + * @static + * @param {google.protobuf.IInt32Value} message Int32Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Int32Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Int32Value message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.Int32Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Int32Value} Int32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int32Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Int32Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Int32Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.Int32Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Int32Value} Int32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Int32Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Int32Value message. 
+ * @function verify + * @memberof google.protobuf.Int32Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Int32Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value)) + return "value: integer expected"; + return null; + }; + + /** + * Creates an Int32Value message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Int32Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Int32Value} Int32Value + */ + Int32Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Int32Value) + return object; + var message = new $root.google.protobuf.Int32Value(); + if (object.value != null) + message.value = object.value | 0; + return message; + }; + + /** + * Creates a plain object from an Int32Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Int32Value + * @static + * @param {google.protobuf.Int32Value} message Int32Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Int32Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + return object; + }; + + /** + * Converts this Int32Value to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Int32Value + * @instance + * @returns {Object.} JSON object + */ + Int32Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Int32Value + * @function getTypeUrl + * @memberof google.protobuf.Int32Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Int32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Int32Value"; + }; + + return Int32Value; + })(); + + protobuf.UInt32Value = (function() { + + /** + * Properties of a UInt32Value. + * @memberof google.protobuf + * @interface IUInt32Value + * @property {number|null} [value] UInt32Value value + */ + + /** + * Constructs a new UInt32Value. + * @memberof google.protobuf + * @classdesc Represents a UInt32Value. + * @implements IUInt32Value + * @constructor + * @param {google.protobuf.IUInt32Value=} [properties] Properties to set + */ + function UInt32Value(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UInt32Value value. + * @member {number} value + * @memberof google.protobuf.UInt32Value + * @instance + */ + UInt32Value.prototype.value = 0; + + /** + * Creates a new UInt32Value instance using the specified properties. + * @function create + * @memberof google.protobuf.UInt32Value + * @static + * @param {google.protobuf.IUInt32Value=} [properties] Properties to set + * @returns {google.protobuf.UInt32Value} UInt32Value instance + */ + UInt32Value.create = function create(properties) { + return new UInt32Value(properties); + }; + + /** + * Encodes the specified UInt32Value message. 
Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. + * @function encode + * @memberof google.protobuf.UInt32Value + * @static + * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UInt32Value.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).uint32(message.value); + return writer; + }; + + /** + * Encodes the specified UInt32Value message, length delimited. Does not implicitly {@link google.protobuf.UInt32Value.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.UInt32Value + * @static + * @param {google.protobuf.IUInt32Value} message UInt32Value message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UInt32Value.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a UInt32Value message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.UInt32Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.UInt32Value} UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UInt32Value.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UInt32Value(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.uint32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a UInt32Value message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UInt32Value + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UInt32Value} UInt32Value + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UInt32Value.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a UInt32Value message. + * @function verify + * @memberof google.protobuf.UInt32Value + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UInt32Value.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isInteger(message.value)) + return "value: integer expected"; + return null; + }; + + /** + * Creates a UInt32Value message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.UInt32Value + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UInt32Value} UInt32Value + */ + UInt32Value.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UInt32Value) + return object; + var message = new $root.google.protobuf.UInt32Value(); + if (object.value != null) + message.value = object.value >>> 0; + return message; + }; + + /** + * Creates a plain object from a UInt32Value message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UInt32Value + * @static + * @param {google.protobuf.UInt32Value} message UInt32Value + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UInt32Value.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = 0; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + return object; + }; + + /** + * Converts this UInt32Value to JSON. + * @function toJSON + * @memberof google.protobuf.UInt32Value + * @instance + * @returns {Object.} JSON object + */ + UInt32Value.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for UInt32Value + * @function getTypeUrl + * @memberof google.protobuf.UInt32Value + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UInt32Value.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UInt32Value"; + }; + + return UInt32Value; + })(); + + protobuf.BoolValue = (function() { + + /** + * Properties of a BoolValue. 
+ * @memberof google.protobuf + * @interface IBoolValue + * @property {boolean|null} [value] BoolValue value + */ + + /** + * Constructs a new BoolValue. + * @memberof google.protobuf + * @classdesc Represents a BoolValue. + * @implements IBoolValue + * @constructor + * @param {google.protobuf.IBoolValue=} [properties] Properties to set + */ + function BoolValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BoolValue value. + * @member {boolean} value + * @memberof google.protobuf.BoolValue + * @instance + */ + BoolValue.prototype.value = false; + + /** + * Creates a new BoolValue instance using the specified properties. + * @function create + * @memberof google.protobuf.BoolValue + * @static + * @param {google.protobuf.IBoolValue=} [properties] Properties to set + * @returns {google.protobuf.BoolValue} BoolValue instance + */ + BoolValue.create = function create(properties) { + return new BoolValue(properties); + }; + + /** + * Encodes the specified BoolValue message. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.BoolValue + * @static + * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BoolValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.value); + return writer; + }; + + /** + * Encodes the specified BoolValue message, length delimited. Does not implicitly {@link google.protobuf.BoolValue.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.BoolValue + * @static + * @param {google.protobuf.IBoolValue} message BoolValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BoolValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BoolValue message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.BoolValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.BoolValue} BoolValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BoolValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.BoolValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BoolValue message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.BoolValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.BoolValue} BoolValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BoolValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BoolValue message. + * @function verify + * @memberof google.protobuf.BoolValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BoolValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (typeof message.value !== "boolean") + return "value: boolean expected"; + return null; + }; + + /** + * Creates a BoolValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.BoolValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.BoolValue} BoolValue + */ + BoolValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.BoolValue) + return object; + var message = new $root.google.protobuf.BoolValue(); + if (object.value != null) + message.value = Boolean(object.value); + return message; + }; + + /** + * Creates a plain object from a BoolValue message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.BoolValue + * @static + * @param {google.protobuf.BoolValue} message BoolValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BoolValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = false; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + return object; + }; + + /** + * Converts this BoolValue to JSON. + * @function toJSON + * @memberof google.protobuf.BoolValue + * @instance + * @returns {Object.} JSON object + */ + BoolValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BoolValue + * @function getTypeUrl + * @memberof google.protobuf.BoolValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BoolValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.BoolValue"; + }; + + return BoolValue; + })(); + + protobuf.StringValue = (function() { + + /** + * Properties of a StringValue. + * @memberof google.protobuf + * @interface IStringValue + * @property {string|null} [value] StringValue value + */ + + /** + * Constructs a new StringValue. + * @memberof google.protobuf + * @classdesc Represents a StringValue. + * @implements IStringValue + * @constructor + * @param {google.protobuf.IStringValue=} [properties] Properties to set + */ + function StringValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StringValue value. 
+ * @member {string} value + * @memberof google.protobuf.StringValue + * @instance + */ + StringValue.prototype.value = ""; + + /** + * Creates a new StringValue instance using the specified properties. + * @function create + * @memberof google.protobuf.StringValue + * @static + * @param {google.protobuf.IStringValue=} [properties] Properties to set + * @returns {google.protobuf.StringValue} StringValue instance + */ + StringValue.create = function create(properties) { + return new StringValue(properties); + }; + + /** + * Encodes the specified StringValue message. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.StringValue + * @static + * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StringValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.value); + return writer; + }; + + /** + * Encodes the specified StringValue message, length delimited. Does not implicitly {@link google.protobuf.StringValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.StringValue + * @static + * @param {google.protobuf.IStringValue} message StringValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StringValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StringValue message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.StringValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.StringValue} StringValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StringValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.StringValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StringValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.StringValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.StringValue} StringValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StringValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StringValue message. 
+ * @function verify + * @memberof google.protobuf.StringValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StringValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isString(message.value)) + return "value: string expected"; + return null; + }; + + /** + * Creates a StringValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.StringValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.StringValue} StringValue + */ + StringValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.StringValue) + return object; + var message = new $root.google.protobuf.StringValue(); + if (object.value != null) + message.value = String(object.value); + return message; + }; + + /** + * Creates a plain object from a StringValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.StringValue + * @static + * @param {google.protobuf.StringValue} message StringValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StringValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.value = ""; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + return object; + }; + + /** + * Converts this StringValue to JSON. 
+ * @function toJSON + * @memberof google.protobuf.StringValue + * @instance + * @returns {Object.} JSON object + */ + StringValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StringValue + * @function getTypeUrl + * @memberof google.protobuf.StringValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StringValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.StringValue"; + }; + + return StringValue; + })(); + + protobuf.BytesValue = (function() { + + /** + * Properties of a BytesValue. + * @memberof google.protobuf + * @interface IBytesValue + * @property {Uint8Array|null} [value] BytesValue value + */ + + /** + * Constructs a new BytesValue. + * @memberof google.protobuf + * @classdesc Represents a BytesValue. + * @implements IBytesValue + * @constructor + * @param {google.protobuf.IBytesValue=} [properties] Properties to set + */ + function BytesValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BytesValue value. + * @member {Uint8Array} value + * @memberof google.protobuf.BytesValue + * @instance + */ + BytesValue.prototype.value = $util.newBuffer([]); + + /** + * Creates a new BytesValue instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.BytesValue + * @static + * @param {google.protobuf.IBytesValue=} [properties] Properties to set + * @returns {google.protobuf.BytesValue} BytesValue instance + */ + BytesValue.create = function create(properties) { + return new BytesValue(properties); + }; + + /** + * Encodes the specified BytesValue message. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.BytesValue + * @static + * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BytesValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.value); + return writer; + }; + + /** + * Encodes the specified BytesValue message, length delimited. Does not implicitly {@link google.protobuf.BytesValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.BytesValue + * @static + * @param {google.protobuf.IBytesValue} message BytesValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BytesValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BytesValue message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.BytesValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.BytesValue} BytesValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BytesValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.BytesValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BytesValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.BytesValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.BytesValue} BytesValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BytesValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BytesValue message. 
+ * @function verify + * @memberof google.protobuf.BytesValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BytesValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; + return null; + }; + + /** + * Creates a BytesValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.BytesValue + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.BytesValue} BytesValue + */ + BytesValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.BytesValue) + return object; + var message = new $root.google.protobuf.BytesValue(); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length >= 0) + message.value = object.value; + return message; + }; + + /** + * Creates a plain object from a BytesValue message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.BytesValue + * @static + * @param {google.protobuf.BytesValue} message BytesValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BytesValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; + return object; + }; + + /** + * Converts this BytesValue to JSON. + * @function toJSON + * @memberof google.protobuf.BytesValue + * @instance + * @returns {Object.} JSON object + */ + BytesValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BytesValue + * @function getTypeUrl + * @memberof google.protobuf.BytesValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BytesValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.BytesValue"; + }; + + return BytesValue; + })(); + + protobuf.Any = (function() { + + /** + * Properties of an Any. + * @memberof google.protobuf + * @interface IAny + * @property {string|null} [type_url] Any type_url + * @property {Uint8Array|null} [value] Any value + */ + + /** + * Constructs a new Any. + * @memberof google.protobuf + * @classdesc Represents an Any. 
+ * @implements IAny + * @constructor + * @param {google.protobuf.IAny=} [properties] Properties to set + */ + function Any(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Any type_url. + * @member {string} type_url + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.type_url = ""; + + /** + * Any value. + * @member {Uint8Array} value + * @memberof google.protobuf.Any + * @instance + */ + Any.prototype.value = $util.newBuffer([]); + + /** + * Creates a new Any instance using the specified properties. + * @function create + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny=} [properties] Properties to set + * @returns {google.protobuf.Any} Any instance + */ + Any.create = function create(properties) { + return new Any(properties); + }; + + /** + * Encodes the specified Any message. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type_url != null && Object.hasOwnProperty.call(message, "type_url")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type_url); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); + return writer; + }; + + /** + * Encodes the specified Any message, length delimited. Does not implicitly {@link google.protobuf.Any.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.IAny} message Any message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Any.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Any message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.type_url = reader.string(); + break; + } + case 2: { + message.value = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Any message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Any + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Any} Any + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Any.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Any message. + * @function verify + * @memberof google.protobuf.Any + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Any.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type_url != null && message.hasOwnProperty("type_url")) + if (!$util.isString(message.type_url)) + return "type_url: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!(message.value && typeof message.value.length === "number" || $util.isString(message.value))) + return "value: buffer expected"; + return null; + }; + + /** + * Creates an Any message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Any + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Any} Any + */ + Any.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Any) + return object; + var message = new $root.google.protobuf.Any(); + if (object.type_url != null) + message.type_url = String(object.type_url); + if (object.value != null) + if (typeof object.value === "string") + $util.base64.decode(object.value, message.value = $util.newBuffer($util.base64.length(object.value)), 0); + else if (object.value.length >= 0) + message.value = object.value; + return message; + }; + + /** + * Creates a plain object from an Any message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Any + * @static + * @param {google.protobuf.Any} message Any + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Any.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.type_url = ""; + if (options.bytes === String) + object.value = ""; + else { + object.value = []; + if (options.bytes !== Array) + object.value = $util.newBuffer(object.value); + } + } + if (message.type_url != null && message.hasOwnProperty("type_url")) + object.type_url = message.type_url; + if (message.value != null && message.hasOwnProperty("value")) + object.value = options.bytes === String ? $util.base64.encode(message.value, 0, message.value.length) : options.bytes === Array ? Array.prototype.slice.call(message.value) : message.value; + return object; + }; + + /** + * Converts this Any to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Any + * @instance + * @returns {Object.} JSON object + */ + Any.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Any + * @function getTypeUrl + * @memberof google.protobuf.Any + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Any.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Any"; + }; + + return Any; + })(); + + protobuf.Empty = (function() { + + /** + * Properties of an Empty. + * @memberof google.protobuf + * @interface IEmpty + */ + + /** + * Constructs a new Empty. + * @memberof google.protobuf + * @classdesc Represents an Empty. + * @implements IEmpty + * @constructor + * @param {google.protobuf.IEmpty=} [properties] Properties to set + */ + function Empty(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new Empty instance using the specified properties. + * @function create + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty=} [properties] Properties to set + * @returns {google.protobuf.Empty} Empty instance + */ + Empty.create = function create(properties) { + return new Empty(properties); + }; + + /** + * Encodes the specified Empty message. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Empty.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified Empty message, length delimited. Does not implicitly {@link google.protobuf.Empty.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.IEmpty} message Empty message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Empty.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an Empty message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Empty + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Empty} Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Empty.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an Empty message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Empty + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Empty} Empty + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Empty.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an Empty message. + * @function verify + * @memberof google.protobuf.Empty + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Empty.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates an Empty message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Empty + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Empty} Empty + */ + Empty.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Empty) + return object; + return new $root.google.protobuf.Empty(); + }; + + /** + * Creates a plain object from an Empty message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Empty + * @static + * @param {google.protobuf.Empty} message Empty + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Empty.toObject = function toObject() { + return {}; + }; + + /** + * Converts this Empty to JSON. 
+ * @function toJSON + * @memberof google.protobuf.Empty + * @instance + * @returns {Object.} JSON object + */ + Empty.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Empty + * @function getTypeUrl + * @memberof google.protobuf.Empty + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Empty.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Empty"; + }; + + return Empty; + })(); + + return protobuf; + })(); + + google.api = (function() { + + /** + * Namespace api. + * @memberof google + * @namespace + */ + var api = {}; + + api.Http = (function() { + + /** + * Properties of a Http. + * @memberof google.api + * @interface IHttp + * @property {Array.|null} [rules] Http rules + * @property {boolean|null} [fullyDecodeReservedExpansion] Http fullyDecodeReservedExpansion + */ + + /** + * Constructs a new Http. + * @memberof google.api + * @classdesc Represents a Http. + * @implements IHttp + * @constructor + * @param {google.api.IHttp=} [properties] Properties to set + */ + function Http(properties) { + this.rules = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Http rules. + * @member {Array.} rules + * @memberof google.api.Http + * @instance + */ + Http.prototype.rules = $util.emptyArray; + + /** + * Http fullyDecodeReservedExpansion. + * @member {boolean} fullyDecodeReservedExpansion + * @memberof google.api.Http + * @instance + */ + Http.prototype.fullyDecodeReservedExpansion = false; + + /** + * Creates a new Http instance using the specified properties. 
+ * @function create + * @memberof google.api.Http + * @static + * @param {google.api.IHttp=} [properties] Properties to set + * @returns {google.api.Http} Http instance + */ + Http.create = function create(properties) { + return new Http(properties); + }; + + /** + * Encodes the specified Http message. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @function encode + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.rules != null && message.rules.length) + for (var i = 0; i < message.rules.length; ++i) + $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); + return writer; + }; + + /** + * Encodes the specified Http message, length delimited. Does not implicitly {@link google.api.Http.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.Http + * @static + * @param {google.api.IHttp} message Http message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Http.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Http message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Http(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.rules && message.rules.length)) + message.rules = []; + message.rules.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + } + case 2: { + message.fullyDecodeReservedExpansion = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Http message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.Http + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.Http} Http + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Http.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Http message. 
+ * @function verify + * @memberof google.api.Http + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Http.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.rules != null && message.hasOwnProperty("rules")) { + if (!Array.isArray(message.rules)) + return "rules: array expected"; + for (var i = 0; i < message.rules.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.rules[i]); + if (error) + return "rules." + error; + } + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + if (typeof message.fullyDecodeReservedExpansion !== "boolean") + return "fullyDecodeReservedExpansion: boolean expected"; + return null; + }; + + /** + * Creates a Http message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.Http + * @static + * @param {Object.} object Plain object + * @returns {google.api.Http} Http + */ + Http.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.Http) + return object; + var message = new $root.google.api.Http(); + if (object.rules) { + if (!Array.isArray(object.rules)) + throw TypeError(".google.api.Http.rules: array expected"); + message.rules = []; + for (var i = 0; i < object.rules.length; ++i) { + if (typeof object.rules[i] !== "object") + throw TypeError(".google.api.Http.rules: object expected"); + message.rules[i] = $root.google.api.HttpRule.fromObject(object.rules[i]); + } + } + if (object.fullyDecodeReservedExpansion != null) + message.fullyDecodeReservedExpansion = Boolean(object.fullyDecodeReservedExpansion); + return message; + }; + + /** + * Creates a plain object from a Http message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.Http + * @static + * @param {google.api.Http} message Http + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Http.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.rules = []; + if (options.defaults) + object.fullyDecodeReservedExpansion = false; + if (message.rules && message.rules.length) { + object.rules = []; + for (var j = 0; j < message.rules.length; ++j) + object.rules[j] = $root.google.api.HttpRule.toObject(message.rules[j], options); + } + if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + object.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + return object; + }; + + /** + * Converts this Http to JSON. + * @function toJSON + * @memberof google.api.Http + * @instance + * @returns {Object.} JSON object + */ + Http.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Http + * @function getTypeUrl + * @memberof google.api.Http + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Http.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.Http"; + }; + + return Http; + })(); + + api.HttpRule = (function() { + + /** + * Properties of a HttpRule. 
+ * @memberof google.api + * @interface IHttpRule + * @property {string|null} [selector] HttpRule selector + * @property {string|null} [get] HttpRule get + * @property {string|null} [put] HttpRule put + * @property {string|null} [post] HttpRule post + * @property {string|null} ["delete"] HttpRule delete + * @property {string|null} [patch] HttpRule patch + * @property {google.api.ICustomHttpPattern|null} [custom] HttpRule custom + * @property {string|null} [body] HttpRule body + * @property {string|null} [responseBody] HttpRule responseBody + * @property {Array.|null} [additionalBindings] HttpRule additionalBindings + */ + + /** + * Constructs a new HttpRule. + * @memberof google.api + * @classdesc Represents a HttpRule. + * @implements IHttpRule + * @constructor + * @param {google.api.IHttpRule=} [properties] Properties to set + */ + function HttpRule(properties) { + this.additionalBindings = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * HttpRule selector. + * @member {string} selector + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.selector = ""; + + /** + * HttpRule get. + * @member {string|null|undefined} get + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.get = null; + + /** + * HttpRule put. + * @member {string|null|undefined} put + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.put = null; + + /** + * HttpRule post. + * @member {string|null|undefined} post + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.post = null; + + /** + * HttpRule delete. + * @member {string|null|undefined} delete + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype["delete"] = null; + + /** + * HttpRule patch. 
+ * @member {string|null|undefined} patch + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.patch = null; + + /** + * HttpRule custom. + * @member {google.api.ICustomHttpPattern|null|undefined} custom + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.custom = null; + + /** + * HttpRule body. + * @member {string} body + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.body = ""; + + /** + * HttpRule responseBody. + * @member {string} responseBody + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.responseBody = ""; + + /** + * HttpRule additionalBindings. + * @member {Array.} additionalBindings + * @memberof google.api.HttpRule + * @instance + */ + HttpRule.prototype.additionalBindings = $util.emptyArray; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * HttpRule pattern. + * @member {"get"|"put"|"post"|"delete"|"patch"|"custom"|undefined} pattern + * @memberof google.api.HttpRule + * @instance + */ + Object.defineProperty(HttpRule.prototype, "pattern", { + get: $util.oneOfGetter($oneOfFields = ["get", "put", "post", "delete", "patch", "custom"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new HttpRule instance using the specified properties. + * @function create + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule=} [properties] Properties to set + * @returns {google.api.HttpRule} HttpRule instance + */ + HttpRule.create = function create(properties) { + return new HttpRule(properties); + }; + + /** + * Encodes the specified HttpRule message. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. 
+ * @function encode + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HttpRule.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); + if (message.get != null && Object.hasOwnProperty.call(message, "get")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); + if (message.put != null && Object.hasOwnProperty.call(message, "put")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); + if (message.post != null && Object.hasOwnProperty.call(message, "post")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); + if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); + if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); + if (message.body != null && Object.hasOwnProperty.call(message, "body")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); + if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) + $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.additionalBindings != null && message.additionalBindings.length) + for (var i = 0; i < message.additionalBindings.length; ++i) + $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); + if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); + return 
writer; + }; + + /** + * Encodes the specified HttpRule message, length delimited. Does not implicitly {@link google.api.HttpRule.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.HttpRule + * @static + * @param {google.api.IHttpRule} message HttpRule message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HttpRule.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a HttpRule message from the specified reader or buffer. + * @function decode + * @memberof google.api.HttpRule + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.HttpRule} HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HttpRule.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.selector = reader.string(); + break; + } + case 2: { + message.get = reader.string(); + break; + } + case 3: { + message.put = reader.string(); + break; + } + case 4: { + message.post = reader.string(); + break; + } + case 5: { + message["delete"] = reader.string(); + break; + } + case 6: { + message.patch = reader.string(); + break; + } + case 8: { + message.custom = $root.google.api.CustomHttpPattern.decode(reader, reader.uint32()); + break; + } + case 7: { + message.body = reader.string(); + break; + } + case 12: { + message.responseBody = reader.string(); + break; + } + case 11: { + if (!(message.additionalBindings && message.additionalBindings.length)) + message.additionalBindings = []; + message.additionalBindings.push($root.google.api.HttpRule.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a HttpRule message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.HttpRule + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.HttpRule} HttpRule + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HttpRule.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a HttpRule message. 
+ * @function verify + * @memberof google.api.HttpRule + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + HttpRule.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.selector != null && message.hasOwnProperty("selector")) + if (!$util.isString(message.selector)) + return "selector: string expected"; + if (message.get != null && message.hasOwnProperty("get")) { + properties.pattern = 1; + if (!$util.isString(message.get)) + return "get: string expected"; + } + if (message.put != null && message.hasOwnProperty("put")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.put)) + return "put: string expected"; + } + if (message.post != null && message.hasOwnProperty("post")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.post)) + return "post: string expected"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message["delete"])) + return "delete: string expected"; + } + if (message.patch != null && message.hasOwnProperty("patch")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + if (!$util.isString(message.patch)) + return "patch: string expected"; + } + if (message.custom != null && message.hasOwnProperty("custom")) { + if (properties.pattern === 1) + return "pattern: multiple values"; + properties.pattern = 1; + { + var error = $root.google.api.CustomHttpPattern.verify(message.custom); + if (error) + return "custom." 
+ error; + } + } + if (message.body != null && message.hasOwnProperty("body")) + if (!$util.isString(message.body)) + return "body: string expected"; + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + if (!$util.isString(message.responseBody)) + return "responseBody: string expected"; + if (message.additionalBindings != null && message.hasOwnProperty("additionalBindings")) { + if (!Array.isArray(message.additionalBindings)) + return "additionalBindings: array expected"; + for (var i = 0; i < message.additionalBindings.length; ++i) { + var error = $root.google.api.HttpRule.verify(message.additionalBindings[i]); + if (error) + return "additionalBindings." + error; + } + } + return null; + }; + + /** + * Creates a HttpRule message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.HttpRule + * @static + * @param {Object.} object Plain object + * @returns {google.api.HttpRule} HttpRule + */ + HttpRule.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.HttpRule) + return object; + var message = new $root.google.api.HttpRule(); + if (object.selector != null) + message.selector = String(object.selector); + if (object.get != null) + message.get = String(object.get); + if (object.put != null) + message.put = String(object.put); + if (object.post != null) + message.post = String(object.post); + if (object["delete"] != null) + message["delete"] = String(object["delete"]); + if (object.patch != null) + message.patch = String(object.patch); + if (object.custom != null) { + if (typeof object.custom !== "object") + throw TypeError(".google.api.HttpRule.custom: object expected"); + message.custom = $root.google.api.CustomHttpPattern.fromObject(object.custom); + } + if (object.body != null) + message.body = String(object.body); + if (object.responseBody != null) + message.responseBody = String(object.responseBody); + if 
(object.additionalBindings) { + if (!Array.isArray(object.additionalBindings)) + throw TypeError(".google.api.HttpRule.additionalBindings: array expected"); + message.additionalBindings = []; + for (var i = 0; i < object.additionalBindings.length; ++i) { + if (typeof object.additionalBindings[i] !== "object") + throw TypeError(".google.api.HttpRule.additionalBindings: object expected"); + message.additionalBindings[i] = $root.google.api.HttpRule.fromObject(object.additionalBindings[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a HttpRule message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.HttpRule + * @static + * @param {google.api.HttpRule} message HttpRule + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + HttpRule.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.additionalBindings = []; + if (options.defaults) { + object.selector = ""; + object.body = ""; + object.responseBody = ""; + } + if (message.selector != null && message.hasOwnProperty("selector")) + object.selector = message.selector; + if (message.get != null && message.hasOwnProperty("get")) { + object.get = message.get; + if (options.oneofs) + object.pattern = "get"; + } + if (message.put != null && message.hasOwnProperty("put")) { + object.put = message.put; + if (options.oneofs) + object.pattern = "put"; + } + if (message.post != null && message.hasOwnProperty("post")) { + object.post = message.post; + if (options.oneofs) + object.pattern = "post"; + } + if (message["delete"] != null && message.hasOwnProperty("delete")) { + object["delete"] = message["delete"]; + if (options.oneofs) + object.pattern = "delete"; + } + if (message.patch != null && message.hasOwnProperty("patch")) { + object.patch = message.patch; + if (options.oneofs) + object.pattern = 
"patch"; + } + if (message.body != null && message.hasOwnProperty("body")) + object.body = message.body; + if (message.custom != null && message.hasOwnProperty("custom")) { + object.custom = $root.google.api.CustomHttpPattern.toObject(message.custom, options); + if (options.oneofs) + object.pattern = "custom"; + } + if (message.additionalBindings && message.additionalBindings.length) { + object.additionalBindings = []; + for (var j = 0; j < message.additionalBindings.length; ++j) + object.additionalBindings[j] = $root.google.api.HttpRule.toObject(message.additionalBindings[j], options); + } + if (message.responseBody != null && message.hasOwnProperty("responseBody")) + object.responseBody = message.responseBody; + return object; + }; + + /** + * Converts this HttpRule to JSON. + * @function toJSON + * @memberof google.api.HttpRule + * @instance + * @returns {Object.} JSON object + */ + HttpRule.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for HttpRule + * @function getTypeUrl + * @memberof google.api.HttpRule + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + HttpRule.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.HttpRule"; + }; + + return HttpRule; + })(); + + api.CustomHttpPattern = (function() { + + /** + * Properties of a CustomHttpPattern. + * @memberof google.api + * @interface ICustomHttpPattern + * @property {string|null} [kind] CustomHttpPattern kind + * @property {string|null} [path] CustomHttpPattern path + */ + + /** + * Constructs a new CustomHttpPattern. + * @memberof google.api + * @classdesc Represents a CustomHttpPattern. 
+ * @implements ICustomHttpPattern + * @constructor + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + */ + function CustomHttpPattern(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CustomHttpPattern kind. + * @member {string} kind + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.kind = ""; + + /** + * CustomHttpPattern path. + * @member {string} path + * @memberof google.api.CustomHttpPattern + * @instance + */ + CustomHttpPattern.prototype.path = ""; + + /** + * Creates a new CustomHttpPattern instance using the specified properties. + * @function create + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern=} [properties] Properties to set + * @returns {google.api.CustomHttpPattern} CustomHttpPattern instance + */ + CustomHttpPattern.create = function create(properties) { + return new CustomHttpPattern(properties); + }; + + /** + * Encodes the specified CustomHttpPattern message. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. 
+ * @function encode + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CustomHttpPattern.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); + if (message.path != null && Object.hasOwnProperty.call(message, "path")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); + return writer; + }; + + /** + * Encodes the specified CustomHttpPattern message, length delimited. Does not implicitly {@link google.api.CustomHttpPattern.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.ICustomHttpPattern} message CustomHttpPattern message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CustomHttpPattern.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer. + * @function decode + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.kind = reader.string(); + break; + } + case 2: { + message.path = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CustomHttpPattern message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.CustomHttpPattern + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CustomHttpPattern.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CustomHttpPattern message. + * @function verify + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CustomHttpPattern.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.kind != null && message.hasOwnProperty("kind")) + if (!$util.isString(message.kind)) + return "kind: string expected"; + if (message.path != null && message.hasOwnProperty("path")) + if (!$util.isString(message.path)) + return "path: string expected"; + return null; + }; + + /** + * Creates a CustomHttpPattern message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {Object.} object Plain object + * @returns {google.api.CustomHttpPattern} CustomHttpPattern + */ + CustomHttpPattern.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.CustomHttpPattern) + return object; + var message = new $root.google.api.CustomHttpPattern(); + if (object.kind != null) + message.kind = String(object.kind); + if (object.path != null) + message.path = String(object.path); + return message; + }; + + /** + * Creates a plain object from a CustomHttpPattern message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.CustomHttpPattern + * @static + * @param {google.api.CustomHttpPattern} message CustomHttpPattern + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CustomHttpPattern.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.kind = ""; + object.path = ""; + } + if (message.kind != null && message.hasOwnProperty("kind")) + object.kind = message.kind; + if (message.path != null && message.hasOwnProperty("path")) + object.path = message.path; + return object; + }; + + /** + * Converts this CustomHttpPattern to JSON. 
+ * @function toJSON + * @memberof google.api.CustomHttpPattern + * @instance + * @returns {Object.} JSON object + */ + CustomHttpPattern.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CustomHttpPattern + * @function getTypeUrl + * @memberof google.api.CustomHttpPattern + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CustomHttpPattern.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.CustomHttpPattern"; + }; + + return CustomHttpPattern; + })(); + + /** + * FieldBehavior enum. + * @name google.api.FieldBehavior + * @enum {number} + * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value + * @property {number} OPTIONAL=1 OPTIONAL value + * @property {number} REQUIRED=2 REQUIRED value + * @property {number} OUTPUT_ONLY=3 OUTPUT_ONLY value + * @property {number} INPUT_ONLY=4 INPUT_ONLY value + * @property {number} IMMUTABLE=5 IMMUTABLE value + * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value + * @property {number} NON_EMPTY_DEFAULT=7 NON_EMPTY_DEFAULT value + */ + api.FieldBehavior = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "FIELD_BEHAVIOR_UNSPECIFIED"] = 0; + values[valuesById[1] = "OPTIONAL"] = 1; + values[valuesById[2] = "REQUIRED"] = 2; + values[valuesById[3] = "OUTPUT_ONLY"] = 3; + values[valuesById[4] = "INPUT_ONLY"] = 4; + values[valuesById[5] = "IMMUTABLE"] = 5; + values[valuesById[6] = "UNORDERED_LIST"] = 6; + values[valuesById[7] = "NON_EMPTY_DEFAULT"] = 7; + return values; + })(); + + api.ResourceDescriptor = (function() { + + /** + * Properties of a ResourceDescriptor. 
+ * @memberof google.api + * @interface IResourceDescriptor + * @property {string|null} [type] ResourceDescriptor type + * @property {Array.|null} [pattern] ResourceDescriptor pattern + * @property {string|null} [nameField] ResourceDescriptor nameField + * @property {google.api.ResourceDescriptor.History|null} [history] ResourceDescriptor history + * @property {string|null} [plural] ResourceDescriptor plural + * @property {string|null} [singular] ResourceDescriptor singular + * @property {Array.|null} [style] ResourceDescriptor style + */ + + /** + * Constructs a new ResourceDescriptor. + * @memberof google.api + * @classdesc Represents a ResourceDescriptor. + * @implements IResourceDescriptor + * @constructor + * @param {google.api.IResourceDescriptor=} [properties] Properties to set + */ + function ResourceDescriptor(properties) { + this.pattern = []; + this.style = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ResourceDescriptor type. + * @member {string} type + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.type = ""; + + /** + * ResourceDescriptor pattern. + * @member {Array.} pattern + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.pattern = $util.emptyArray; + + /** + * ResourceDescriptor nameField. + * @member {string} nameField + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.nameField = ""; + + /** + * ResourceDescriptor history. + * @member {google.api.ResourceDescriptor.History} history + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.history = 0; + + /** + * ResourceDescriptor plural. 
+ * @member {string} plural + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.plural = ""; + + /** + * ResourceDescriptor singular. + * @member {string} singular + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.singular = ""; + + /** + * ResourceDescriptor style. + * @member {Array.} style + * @memberof google.api.ResourceDescriptor + * @instance + */ + ResourceDescriptor.prototype.style = $util.emptyArray; + + /** + * Creates a new ResourceDescriptor instance using the specified properties. + * @function create + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor=} [properties] Properties to set + * @returns {google.api.ResourceDescriptor} ResourceDescriptor instance + */ + ResourceDescriptor.create = function create(properties) { + return new ResourceDescriptor(properties); + }; + + /** + * Encodes the specified ResourceDescriptor message. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. 
+ * @function encode + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceDescriptor.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.pattern != null && message.pattern.length) + for (var i = 0; i < message.pattern.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); + if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); + if (message.history != null && Object.hasOwnProperty.call(message, "history")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); + if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); + if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); + if (message.style != null && message.style.length) { + writer.uint32(/* id 10, wireType 2 =*/82).fork(); + for (var i = 0; i < message.style.length; ++i) + writer.int32(message.style[i]); + writer.ldelim(); + } + return writer; + }; + + /** + * Encodes the specified ResourceDescriptor message, length delimited. Does not implicitly {@link google.api.ResourceDescriptor.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.IResourceDescriptor} message ResourceDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceDescriptor.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer. + * @function decode + * @memberof google.api.ResourceDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceDescriptor.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.type = reader.string(); + break; + } + case 2: { + if (!(message.pattern && message.pattern.length)) + message.pattern = []; + message.pattern.push(reader.string()); + break; + } + case 3: { + message.nameField = reader.string(); + break; + } + case 4: { + message.history = reader.int32(); + break; + } + case 5: { + message.plural = reader.string(); + break; + } + case 6: { + message.singular = reader.string(); + break; + } + case 10: { + if (!(message.style && message.style.length)) + message.style = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.style.push(reader.int32()); + } else + message.style.push(reader.int32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ResourceDescriptor message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.ResourceDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceDescriptor.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ResourceDescriptor message. 
+ * @function verify + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ResourceDescriptor.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.pattern != null && message.hasOwnProperty("pattern")) { + if (!Array.isArray(message.pattern)) + return "pattern: array expected"; + for (var i = 0; i < message.pattern.length; ++i) + if (!$util.isString(message.pattern[i])) + return "pattern: string[] expected"; + } + if (message.nameField != null && message.hasOwnProperty("nameField")) + if (!$util.isString(message.nameField)) + return "nameField: string expected"; + if (message.history != null && message.hasOwnProperty("history")) + switch (message.history) { + default: + return "history: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.plural != null && message.hasOwnProperty("plural")) + if (!$util.isString(message.plural)) + return "plural: string expected"; + if (message.singular != null && message.hasOwnProperty("singular")) + if (!$util.isString(message.singular)) + return "singular: string expected"; + if (message.style != null && message.hasOwnProperty("style")) { + if (!Array.isArray(message.style)) + return "style: array expected"; + for (var i = 0; i < message.style.length; ++i) + switch (message.style[i]) { + default: + return "style: enum value[] expected"; + case 0: + case 1: + break; + } + } + return null; + }; + + /** + * Creates a ResourceDescriptor message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {Object.} object Plain object + * @returns {google.api.ResourceDescriptor} ResourceDescriptor + */ + ResourceDescriptor.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceDescriptor) + return object; + var message = new $root.google.api.ResourceDescriptor(); + if (object.type != null) + message.type = String(object.type); + if (object.pattern) { + if (!Array.isArray(object.pattern)) + throw TypeError(".google.api.ResourceDescriptor.pattern: array expected"); + message.pattern = []; + for (var i = 0; i < object.pattern.length; ++i) + message.pattern[i] = String(object.pattern[i]); + } + if (object.nameField != null) + message.nameField = String(object.nameField); + switch (object.history) { + default: + if (typeof object.history === "number") { + message.history = object.history; + break; + } + break; + case "HISTORY_UNSPECIFIED": + case 0: + message.history = 0; + break; + case "ORIGINALLY_SINGLE_PATTERN": + case 1: + message.history = 1; + break; + case "FUTURE_MULTI_PATTERN": + case 2: + message.history = 2; + break; + } + if (object.plural != null) + message.plural = String(object.plural); + if (object.singular != null) + message.singular = String(object.singular); + if (object.style) { + if (!Array.isArray(object.style)) + throw TypeError(".google.api.ResourceDescriptor.style: array expected"); + message.style = []; + for (var i = 0; i < object.style.length; ++i) + switch (object.style[i]) { + default: + if (typeof object.style[i] === "number") { + message.style[i] = object.style[i]; + break; + } + case "STYLE_UNSPECIFIED": + case 0: + message.style[i] = 0; + break; + case "DECLARATIVE_FRIENDLY": + case 1: + message.style[i] = 1; + break; + } + } + return message; + }; + + /** + * Creates a plain object from a ResourceDescriptor message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.ResourceDescriptor + * @static + * @param {google.api.ResourceDescriptor} message ResourceDescriptor + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ResourceDescriptor.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.pattern = []; + object.style = []; + } + if (options.defaults) { + object.type = ""; + object.nameField = ""; + object.history = options.enums === String ? "HISTORY_UNSPECIFIED" : 0; + object.plural = ""; + object.singular = ""; + } + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.pattern && message.pattern.length) { + object.pattern = []; + for (var j = 0; j < message.pattern.length; ++j) + object.pattern[j] = message.pattern[j]; + } + if (message.nameField != null && message.hasOwnProperty("nameField")) + object.nameField = message.nameField; + if (message.history != null && message.hasOwnProperty("history")) + object.history = options.enums === String ? $root.google.api.ResourceDescriptor.History[message.history] === undefined ? message.history : $root.google.api.ResourceDescriptor.History[message.history] : message.history; + if (message.plural != null && message.hasOwnProperty("plural")) + object.plural = message.plural; + if (message.singular != null && message.hasOwnProperty("singular")) + object.singular = message.singular; + if (message.style && message.style.length) { + object.style = []; + for (var j = 0; j < message.style.length; ++j) + object.style[j] = options.enums === String ? $root.google.api.ResourceDescriptor.Style[message.style[j]] === undefined ? message.style[j] : $root.google.api.ResourceDescriptor.Style[message.style[j]] : message.style[j]; + } + return object; + }; + + /** + * Converts this ResourceDescriptor to JSON. 
+ * @function toJSON + * @memberof google.api.ResourceDescriptor + * @instance + * @returns {Object.} JSON object + */ + ResourceDescriptor.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ResourceDescriptor + * @function getTypeUrl + * @memberof google.api.ResourceDescriptor + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ResourceDescriptor.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.ResourceDescriptor"; + }; + + /** + * History enum. + * @name google.api.ResourceDescriptor.History + * @enum {number} + * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value + * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value + * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value + */ + ResourceDescriptor.History = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "HISTORY_UNSPECIFIED"] = 0; + values[valuesById[1] = "ORIGINALLY_SINGLE_PATTERN"] = 1; + values[valuesById[2] = "FUTURE_MULTI_PATTERN"] = 2; + return values; + })(); + + /** + * Style enum. + * @name google.api.ResourceDescriptor.Style + * @enum {number} + * @property {number} STYLE_UNSPECIFIED=0 STYLE_UNSPECIFIED value + * @property {number} DECLARATIVE_FRIENDLY=1 DECLARATIVE_FRIENDLY value + */ + ResourceDescriptor.Style = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STYLE_UNSPECIFIED"] = 0; + values[valuesById[1] = "DECLARATIVE_FRIENDLY"] = 1; + return values; + })(); + + return ResourceDescriptor; + })(); + + api.ResourceReference = (function() { + + /** + * Properties of a ResourceReference. 
+ * @memberof google.api + * @interface IResourceReference + * @property {string|null} [type] ResourceReference type + * @property {string|null} [childType] ResourceReference childType + */ + + /** + * Constructs a new ResourceReference. + * @memberof google.api + * @classdesc Represents a ResourceReference. + * @implements IResourceReference + * @constructor + * @param {google.api.IResourceReference=} [properties] Properties to set + */ + function ResourceReference(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ResourceReference type. + * @member {string} type + * @memberof google.api.ResourceReference + * @instance + */ + ResourceReference.prototype.type = ""; + + /** + * ResourceReference childType. + * @member {string} childType + * @memberof google.api.ResourceReference + * @instance + */ + ResourceReference.prototype.childType = ""; + + /** + * Creates a new ResourceReference instance using the specified properties. + * @function create + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference=} [properties] Properties to set + * @returns {google.api.ResourceReference} ResourceReference instance + */ + ResourceReference.create = function create(properties) { + return new ResourceReference(properties); + }; + + /** + * Encodes the specified ResourceReference message. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. 
+ * @function encode + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceReference.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); + if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); + return writer; + }; + + /** + * Encodes the specified ResourceReference message, length delimited. Does not implicitly {@link google.api.ResourceReference.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.ResourceReference + * @static + * @param {google.api.IResourceReference} message ResourceReference message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ResourceReference.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ResourceReference message from the specified reader or buffer. + * @function decode + * @memberof google.api.ResourceReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.ResourceReference} ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceReference.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.type = reader.string(); + break; + } + case 2: { + message.childType = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ResourceReference message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.ResourceReference + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ResourceReference} ResourceReference + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ResourceReference.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ResourceReference message. + * @function verify + * @memberof google.api.ResourceReference + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ResourceReference.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.childType != null && message.hasOwnProperty("childType")) + if (!$util.isString(message.childType)) + return "childType: string expected"; + return null; + }; + + /** + * Creates a ResourceReference message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ResourceReference + * @static + * @param {Object.} object Plain object + * @returns {google.api.ResourceReference} ResourceReference + */ + ResourceReference.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ResourceReference) + return object; + var message = new $root.google.api.ResourceReference(); + if (object.type != null) + message.type = String(object.type); + if (object.childType != null) + message.childType = String(object.childType); + return message; + }; + + /** + * Creates a plain object from a ResourceReference message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.ResourceReference + * @static + * @param {google.api.ResourceReference} message ResourceReference + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ResourceReference.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.type = ""; + object.childType = ""; + } + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.childType != null && message.hasOwnProperty("childType")) + object.childType = message.childType; + return object; + }; + + /** + * Converts this ResourceReference to JSON. 
+ * @function toJSON + * @memberof google.api.ResourceReference + * @instance + * @returns {Object.} JSON object + */ + ResourceReference.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ResourceReference + * @function getTypeUrl + * @memberof google.api.ResourceReference + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ResourceReference.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.ResourceReference"; + }; + + return ResourceReference; + })(); + + return api; + })(); + + google.rpc = (function() { + + /** + * Namespace rpc. + * @memberof google + * @namespace + */ + var rpc = {}; + + rpc.Status = (function() { + + /** + * Properties of a Status. + * @memberof google.rpc + * @interface IStatus + * @property {number|null} [code] Status code + * @property {string|null} [message] Status message + * @property {Array.|null} [details] Status details + */ + + /** + * Constructs a new Status. + * @memberof google.rpc + * @classdesc Represents a Status. + * @implements IStatus + * @constructor + * @param {google.rpc.IStatus=} [properties] Properties to set + */ + function Status(properties) { + this.details = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Status code. + * @member {number} code + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.code = 0; + + /** + * Status message. + * @member {string} message + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.message = ""; + + /** + * Status details. 
+ * @member {Array.} details + * @memberof google.rpc.Status + * @instance + */ + Status.prototype.details = $util.emptyArray; + + /** + * Creates a new Status instance using the specified properties. + * @function create + * @memberof google.rpc.Status + * @static + * @param {google.rpc.IStatus=} [properties] Properties to set + * @returns {google.rpc.Status} Status instance + */ + Status.create = function create(properties) { + return new Status(properties); + }; + + /** + * Encodes the specified Status message. Does not implicitly {@link google.rpc.Status.verify|verify} messages. + * @function encode + * @memberof google.rpc.Status + * @static + * @param {google.rpc.IStatus} message Status message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Status.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.code != null && Object.hasOwnProperty.call(message, "code")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); + if (message.message != null && Object.hasOwnProperty.call(message, "message")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.message); + if (message.details != null && message.details.length) + for (var i = 0; i < message.details.length; ++i) + $root.google.protobuf.Any.encode(message.details[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Status message, length delimited. Does not implicitly {@link google.rpc.Status.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.rpc.Status + * @static + * @param {google.rpc.IStatus} message Status message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Status.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Status message from the specified reader or buffer. + * @function decode + * @memberof google.rpc.Status + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.rpc.Status} Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Status.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.rpc.Status(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.code = reader.int32(); + break; + } + case 2: { + message.message = reader.string(); + break; + } + case 3: { + if (!(message.details && message.details.length)) + message.details = []; + message.details.push($root.google.protobuf.Any.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Status message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.rpc.Status + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.rpc.Status} Status + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Status.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Status message. + * @function verify + * @memberof google.rpc.Status + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Status.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.code != null && message.hasOwnProperty("code")) + if (!$util.isInteger(message.code)) + return "code: integer expected"; + if (message.message != null && message.hasOwnProperty("message")) + if (!$util.isString(message.message)) + return "message: string expected"; + if (message.details != null && message.hasOwnProperty("details")) { + if (!Array.isArray(message.details)) + return "details: array expected"; + for (var i = 0; i < message.details.length; ++i) { + var error = $root.google.protobuf.Any.verify(message.details[i]); + if (error) + return "details." + error; + } + } + return null; + }; + + /** + * Creates a Status message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.rpc.Status + * @static + * @param {Object.} object Plain object + * @returns {google.rpc.Status} Status + */ + Status.fromObject = function fromObject(object) { + if (object instanceof $root.google.rpc.Status) + return object; + var message = new $root.google.rpc.Status(); + if (object.code != null) + message.code = object.code | 0; + if (object.message != null) + message.message = String(object.message); + if (object.details) { + if (!Array.isArray(object.details)) + throw TypeError(".google.rpc.Status.details: array expected"); + message.details = []; + for (var i = 0; i < object.details.length; ++i) { + if (typeof object.details[i] !== "object") + throw TypeError(".google.rpc.Status.details: object expected"); + message.details[i] = $root.google.protobuf.Any.fromObject(object.details[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a Status message. Also converts values to other types if specified. + * @function toObject + * @memberof google.rpc.Status + * @static + * @param {google.rpc.Status} message Status + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Status.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.details = []; + if (options.defaults) { + object.code = 0; + object.message = ""; + } + if (message.code != null && message.hasOwnProperty("code")) + object.code = message.code; + if (message.message != null && message.hasOwnProperty("message")) + object.message = message.message; + if (message.details && message.details.length) { + object.details = []; + for (var j = 0; j < message.details.length; ++j) + object.details[j] = $root.google.protobuf.Any.toObject(message.details[j], options); + } + return object; + }; + + /** + * Converts this Status to JSON. 
+ * @function toJSON + * @memberof google.rpc.Status + * @instance + * @returns {Object.} JSON object + */ + Status.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Status + * @function getTypeUrl + * @memberof google.rpc.Status + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Status.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.rpc.Status"; + }; + + return Status; + })(); + + return rpc; + })(); + + return google; + })(); + + return $root; +}); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json new file mode 100644 index 00000000000..e8388095ced --- /dev/null +++ b/handwritten/bigquery-storage/protos/protos.json @@ -0,0 +1,2817 @@ +{ + "nested": { + "google": { + "nested": { + "cloud": { + "nested": { + "bigquery": { + "nested": { + "storage": { + "nested": { + "v1": { + "options": { + "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", + "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", + "java_package": "com.google.cloud.bigquery.storage.v1", + "java_multiple_files": true, + "java_outer_classname": "TableProto", + "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1", + "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", + "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" + }, + "nested": { + "_columnName": { + "oneof": [ + "columnName" + ] + }, + "columnName": { + "type": "string", + "id": 454943157, + "extend": "google.protobuf.FieldOptions", + "options": { + "proto3_optional": true + } + }, + "ArrowSchema": { + "fields": { + "serializedSchema": { + "type": 
"bytes", + "id": 1 + } + } + }, + "ArrowRecordBatch": { + "fields": { + "serializedRecordBatch": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2, + "options": { + "deprecated": true + } + } + } + }, + "ArrowSerializationOptions": { + "fields": { + "bufferCompression": { + "type": "CompressionCodec", + "id": 2 + } + }, + "nested": { + "CompressionCodec": { + "values": { + "COMPRESSION_UNSPECIFIED": 0, + "LZ4_FRAME": 1, + "ZSTD": 2 + } + } + } + }, + "AvroSchema": { + "fields": { + "schema": { + "type": "string", + "id": 1 + } + } + }, + "AvroRows": { + "fields": { + "serializedBinaryRows": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2, + "options": { + "deprecated": true + } + } + } + }, + "AvroSerializationOptions": { + "fields": { + "enableDisplayNameAttribute": { + "type": "bool", + "id": 1 + } + } + }, + "ProtoSchema": { + "fields": { + "protoDescriptor": { + "type": "google.protobuf.DescriptorProto", + "id": 1 + } + } + }, + "ProtoRows": { + "fields": { + "serializedRows": { + "rule": "repeated", + "type": "bytes", + "id": 1 + } + } + }, + "BigQueryRead": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateReadSession": { + "requestType": "CreateReadSessionRequest", + "responseType": "ReadSession", + "options": { + "(google.api.http).post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "parent,read_session,max_stream_count" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{read_session.table=projects/*/datasets/*/tables/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "parent,read_session,max_stream_count" + } + ] + }, + "ReadRows": { + "requestType": "ReadRowsRequest", + "responseType": 
"ReadRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}", + "(google.api.method_signature)": "read_stream,offset" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "read_stream,offset" + } + ] + }, + "SplitReadStream": { + "requestType": "SplitReadStreamRequest", + "responseType": "SplitReadStreamResponse", + "options": { + "(google.api.http).get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" + } + } + ] + } + } + }, + "BigQueryWrite": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateWriteStream": { + "requestType": "CreateWriteStreamRequest", + "responseType": "WriteStream", + "options": { + "(google.api.http).post": "/v1/{parent=projects/*/datasets/*/tables/*}", + "(google.api.http).body": "write_stream", + "(google.api.method_signature)": "parent,write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{parent=projects/*/datasets/*/tables/*}", + "body": "write_stream" + } + }, + { + "(google.api.method_signature)": "parent,write_stream" + } + ] + }, + "AppendRows": { + "requestType": "AppendRowsRequest", + "requestStream": true, + "responseType": "AppendRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": 
"/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "write_stream" + } + ] + }, + "GetWriteStream": { + "requestType": "GetWriteStreamRequest", + "responseType": "WriteStream", + "options": { + "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "name" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "name" + } + ] + }, + "FinalizeWriteStream": { + "requestType": "FinalizeWriteStreamRequest", + "responseType": "FinalizeWriteStreamResponse", + "options": { + "(google.api.http).post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "name" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{name=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "name" + } + ] + }, + "BatchCommitWriteStreams": { + "requestType": "BatchCommitWriteStreamsRequest", + "responseType": "BatchCommitWriteStreamsResponse", + "options": { + "(google.api.http).get": "/v1/{parent=projects/*/datasets/*/tables/*}", + "(google.api.method_signature)": "parent" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1/{parent=projects/*/datasets/*/tables/*}" + } + }, + { + "(google.api.method_signature)": "parent" + } + ] + }, + "FlushRows": { + "requestType": "FlushRowsRequest", + "responseType": "FlushRowsResponse", + "options": { + "(google.api.http).post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "write_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}", + "body": "*" + } 
+ }, + { + "(google.api.method_signature)": "write_stream" + } + ] + } + } + }, + "CreateReadSessionRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" + } + }, + "readSession": { + "type": "ReadSession", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "maxStreamCount": { + "type": "int32", + "id": 3 + }, + "preferredMinStreamCount": { + "type": "int32", + "id": 4 + } + } + }, + "ReadRowsRequest": { + "fields": { + "readStream": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" + } + }, + "offset": { + "type": "int64", + "id": 2 + } + } + }, + "ThrottleState": { + "fields": { + "throttlePercent": { + "type": "int32", + "id": 1 + } + } + }, + "StreamStats": { + "fields": { + "progress": { + "type": "Progress", + "id": 2 + } + }, + "nested": { + "Progress": { + "fields": { + "atResponseStart": { + "type": "double", + "id": 1 + }, + "atResponseEnd": { + "type": "double", + "id": 2 + } + } + } + } + }, + "ReadRowsResponse": { + "oneofs": { + "rows": { + "oneof": [ + "avroRows", + "arrowRecordBatch" + ] + }, + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] + } + }, + "fields": { + "avroRows": { + "type": "AvroRows", + "id": 3 + }, + "arrowRecordBatch": { + "type": "ArrowRecordBatch", + "id": 4 + }, + "rowCount": { + "type": "int64", + "id": 6 + }, + "stats": { + "type": "StreamStats", + "id": 2 + }, + "throttleState": { + "type": "ThrottleState", + "id": 5 + }, + "avroSchema": { + "type": "AvroSchema", + "id": 7, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 8, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + } + }, + 
"SplitReadStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/ReadStream" + } + }, + "fraction": { + "type": "double", + "id": 2 + } + } + }, + "SplitReadStreamResponse": { + "fields": { + "primaryStream": { + "type": "ReadStream", + "id": 1 + }, + "remainderStream": { + "type": "ReadStream", + "id": 2 + } + } + }, + "CreateWriteStreamRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "writeStream": { + "type": "WriteStream", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "AppendRowsRequest": { + "oneofs": { + "rows": { + "oneof": [ + "protoRows" + ] + } + }, + "fields": { + "writeStream": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + }, + "offset": { + "type": "google.protobuf.Int64Value", + "id": 2 + }, + "protoRows": { + "type": "ProtoData", + "id": 4 + }, + "traceId": { + "type": "string", + "id": 6 + } + }, + "nested": { + "ProtoData": { + "fields": { + "writerSchema": { + "type": "ProtoSchema", + "id": 1 + }, + "rows": { + "type": "ProtoRows", + "id": 2 + } + } + } + } + }, + "AppendRowsResponse": { + "oneofs": { + "response": { + "oneof": [ + "appendResult", + "error" + ] + } + }, + "fields": { + "appendResult": { + "type": "AppendResult", + "id": 1 + }, + "error": { + "type": "google.rpc.Status", + "id": 2 + }, + "updatedSchema": { + "type": "TableSchema", + "id": 3 + }, + "rowErrors": { + "rule": "repeated", + "type": "RowError", + "id": 4 + }, + "writeStream": { + "type": "string", + "id": 5 + } + }, + "nested": { + "AppendResult": { + "fields": { + "offset": { 
+ "type": "google.protobuf.Int64Value", + "id": 1 + } + } + } + } + }, + "GetWriteStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + }, + "view": { + "type": "WriteStreamView", + "id": 3 + } + } + }, + "BatchCommitWriteStreamsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "writeStreams": { + "rule": "repeated", + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCommitWriteStreamsResponse": { + "fields": { + "commitTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "streamErrors": { + "rule": "repeated", + "type": "StorageError", + "id": 2 + } + } + }, + "FinalizeWriteStreamRequest": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + } + } + }, + "FinalizeWriteStreamResponse": { + "fields": { + "rowCount": { + "type": "int64", + "id": 1 + } + } + }, + "FlushRowsRequest": { + "fields": { + "writeStream": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquerystorage.googleapis.com/WriteStream" + } + }, + "offset": { + "type": "google.protobuf.Int64Value", + "id": 2 + } + } + }, + "FlushRowsResponse": { + "fields": { + "offset": { + "type": "int64", + "id": 1 + } + } + }, + "StorageError": { + "fields": { + "code": { + "type": "StorageErrorCode", + "id": 1 + }, + "entity": { + "type": "string", + "id": 2 + }, + "errorMessage": { + "type": "string", + "id": 3 + } + }, + "nested": { + "StorageErrorCode": { + 
"values": { + "STORAGE_ERROR_CODE_UNSPECIFIED": 0, + "TABLE_NOT_FOUND": 1, + "STREAM_ALREADY_COMMITTED": 2, + "STREAM_NOT_FOUND": 3, + "INVALID_STREAM_TYPE": 4, + "INVALID_STREAM_STATE": 5, + "STREAM_FINALIZED": 6, + "SCHEMA_MISMATCH_EXTRA_FIELDS": 7, + "OFFSET_ALREADY_EXISTS": 8, + "OFFSET_OUT_OF_RANGE": 9 + } + } + } + }, + "RowError": { + "fields": { + "index": { + "type": "int64", + "id": 1 + }, + "code": { + "type": "RowErrorCode", + "id": 2 + }, + "message": { + "type": "string", + "id": 3 + } + }, + "nested": { + "RowErrorCode": { + "values": { + "ROW_ERROR_CODE_UNSPECIFIED": 0, + "FIELDS_ERROR": 1 + } + } + } + }, + "DataFormat": { + "values": { + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, + "ARROW": 2 + } + }, + "ReadSession": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" + }, + "oneofs": { + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] + } + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "expireTime": { + "type": "google.protobuf.Timestamp", + "id": 2, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "dataFormat": { + "type": "DataFormat", + "id": 3, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + }, + "avroSchema": { + "type": "AvroSchema", + "id": 4, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 5, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "table": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "IMMUTABLE", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 7, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + 
"readOptions": { + "type": "TableReadOptions", + "id": 8, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "streams": { + "rule": "repeated", + "type": "ReadStream", + "id": 10, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "estimatedTotalBytesScanned": { + "type": "int64", + "id": 12, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "traceId": { + "type": "string", + "id": 13, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + }, + "nested": { + "TableModifiers": { + "fields": { + "snapshotTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + } + } + }, + "TableReadOptions": { + "oneofs": { + "outputFormatSerializationOptions": { + "oneof": [ + "arrowSerializationOptions", + "avroSerializationOptions" + ] + } + }, + "fields": { + "selectedFields": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "rowRestriction": { + "type": "string", + "id": 2 + }, + "arrowSerializationOptions": { + "type": "ArrowSerializationOptions", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "avroSerializationOptions": { + "type": "AvroSerializationOptions", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + } + } + }, + "ReadStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + } + }, + "WriteStreamView": { + "values": { + "WRITE_STREAM_VIEW_UNSPECIFIED": 0, + "BASIC": 1, + "FULL": 2 + } + }, + "WriteStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/WriteStream", + "(google.api.resource).pattern": "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}" + }, + "fields": { 
+ "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "type": { + "type": "Type", + "id": 2, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + }, + "createTime": { + "type": "google.protobuf.Timestamp", + "id": 3, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "commitTime": { + "type": "google.protobuf.Timestamp", + "id": 4, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "tableSchema": { + "type": "TableSchema", + "id": 5, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "writeMode": { + "type": "WriteMode", + "id": 7, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + }, + "location": { + "type": "string", + "id": 8, + "options": { + "(google.api.field_behavior)": "IMMUTABLE" + } + } + }, + "nested": { + "Type": { + "values": { + "TYPE_UNSPECIFIED": 0, + "COMMITTED": 1, + "PENDING": 2, + "BUFFERED": 3 + } + }, + "WriteMode": { + "values": { + "WRITE_MODE_UNSPECIFIED": 0, + "INSERT": 1 + } + } + } + }, + "TableSchema": { + "fields": { + "fields": { + "rule": "repeated", + "type": "TableFieldSchema", + "id": 1 + } + } + }, + "TableFieldSchema": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "type": { + "type": "Type", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "mode": { + "type": "Mode", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "fields": { + "rule": "repeated", + "type": "TableFieldSchema", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "description": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "maxLength": { + "type": "int64", + "id": 7, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "precision": { + "type": "int64", + "id": 8, 
+ "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "scale": { + "type": "int64", + "id": 9, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + }, + "nested": { + "Type": { + "values": { + "TYPE_UNSPECIFIED": 0, + "STRING": 1, + "INT64": 2, + "DOUBLE": 3, + "STRUCT": 4, + "BYTES": 5, + "BOOL": 6, + "TIMESTAMP": 7, + "DATE": 8, + "TIME": 9, + "DATETIME": 10, + "GEOGRAPHY": 11, + "NUMERIC": 12, + "BIGNUMERIC": 13, + "INTERVAL": 14, + "JSON": 15 + } + }, + "Mode": { + "values": { + "MODE_UNSPECIFIED": 0, + "NULLABLE": 1, + "REQUIRED": 2, + "REPEATED": 3 + } + } + } + } + } + }, + "v1beta1": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", + "java_outer_classname": "TableReferenceProto", + "java_package": "com.google.cloud.bigquery.storage.v1beta1" + }, + "nested": { + "ArrowSchema": { + "fields": { + "serializedSchema": { + "type": "bytes", + "id": 1 + } + } + }, + "ArrowRecordBatch": { + "fields": { + "serializedRecordBatch": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "AvroSchema": { + "fields": { + "schema": { + "type": "string", + "id": 1 + } + } + }, + "AvroRows": { + "fields": { + "serializedBinaryRows": { + "type": "bytes", + "id": 1 + }, + "rowCount": { + "type": "int64", + "id": 2 + } + } + }, + "TableReadOptions": { + "fields": { + "selectedFields": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "rowRestriction": { + "type": "string", + "id": 2 + } + } + }, + "BigQueryStorage": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "CreateReadSession": { + "requestType": "CreateReadSessionRequest", + "responseType": "ReadSession", + "options": { + "(google.api.http).post": "/v1beta1/{table_reference.project_id=projects/*}", + 
"(google.api.http).body": "*", + "(google.api.http).additional_bindings.post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", + "(google.api.http).additional_bindings.body": "*", + "(google.api.method_signature)": "table_reference,parent,requested_streams" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta1/{table_reference.project_id=projects/*}", + "body": "*", + "additional_bindings": { + "post": "/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}", + "body": "*" + } + } + }, + { + "(google.api.method_signature)": "table_reference,parent,requested_streams" + } + ] + }, + "ReadRows": { + "requestType": "ReadRowsRequest", + "responseType": "ReadRowsResponse", + "responseStream": true, + "options": { + "(google.api.http).get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}", + "(google.api.method_signature)": "read_position" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1beta1/{read_position.stream.name=projects/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "read_position" + } + ] + }, + "BatchCreateReadSessionStreams": { + "requestType": "BatchCreateReadSessionStreamsRequest", + "responseType": "BatchCreateReadSessionStreamsResponse", + "options": { + "(google.api.http).post": "/v1beta1/{session.name=projects/*/sessions/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "session,requested_streams" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta1/{session.name=projects/*/sessions/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "session,requested_streams" + } + ] + }, + "FinalizeStream": { + "requestType": "FinalizeStreamRequest", + "responseType": "google.protobuf.Empty", + "options": { + "(google.api.http).post": "/v1beta1/{stream.name=projects/*/streams/*}", + "(google.api.http).body": "*", + "(google.api.method_signature)": "stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": 
"/v1beta1/{stream.name=projects/*/streams/*}", + "body": "*" + } + }, + { + "(google.api.method_signature)": "stream" + } + ] + }, + "SplitReadStream": { + "requestType": "SplitReadStreamRequest", + "responseType": "SplitReadStreamResponse", + "options": { + "(google.api.http).get": "/v1beta1/{original_stream.name=projects/*/streams/*}", + "(google.api.method_signature)": "original_stream" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1beta1/{original_stream.name=projects/*/streams/*}" + } + }, + { + "(google.api.method_signature)": "original_stream" + } + ] + } + } + }, + "Stream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/Stream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/streams/{stream}" + }, + "fields": { + "name": { + "type": "string", + "id": 1 + } + } + }, + "StreamPosition": { + "fields": { + "stream": { + "type": "Stream", + "id": 1 + }, + "offset": { + "type": "int64", + "id": 2 + } + } + }, + "ReadSession": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadSession", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}" + }, + "oneofs": { + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] + } + }, + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "expireTime": { + "type": "google.protobuf.Timestamp", + "id": 2 + }, + "avroSchema": { + "type": "AvroSchema", + "id": 5 + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 6 + }, + "streams": { + "rule": "repeated", + "type": "Stream", + "id": 4 + }, + "tableReference": { + "type": "TableReference", + "id": 7 + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 8 + }, + "shardingStrategy": { + "type": "ShardingStrategy", + "id": 9 + } + } + }, + "CreateReadSessionRequest": { + "fields": { + "tableReference": { + "type": "TableReference", + "id": 1, + "options": { + "(google.api.field_behavior)": 
"REQUIRED" + } + }, + "parent": { + "type": "string", + "id": 6, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "cloudresourcemanager.googleapis.com/Project" + } + }, + "tableModifiers": { + "type": "TableModifiers", + "id": 2 + }, + "requestedStreams": { + "type": "int32", + "id": 3 + }, + "readOptions": { + "type": "TableReadOptions", + "id": 4 + }, + "format": { + "type": "DataFormat", + "id": 5 + }, + "shardingStrategy": { + "type": "ShardingStrategy", + "id": 7 + } + } + }, + "DataFormat": { + "values": { + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, + "ARROW": 3 + } + }, + "ShardingStrategy": { + "values": { + "SHARDING_STRATEGY_UNSPECIFIED": 0, + "LIQUID": 1, + "BALANCED": 2 + } + }, + "ReadRowsRequest": { + "fields": { + "readPosition": { + "type": "StreamPosition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "StreamStatus": { + "fields": { + "estimatedRowCount": { + "type": "int64", + "id": 1 + }, + "fractionConsumed": { + "type": "float", + "id": 2 + }, + "progress": { + "type": "Progress", + "id": 4 + }, + "isSplittable": { + "type": "bool", + "id": 3 + } + } + }, + "Progress": { + "fields": { + "atResponseStart": { + "type": "float", + "id": 1 + }, + "atResponseEnd": { + "type": "float", + "id": 2 + } + } + }, + "ThrottleStatus": { + "fields": { + "throttlePercent": { + "type": "int32", + "id": 1 + } + } + }, + "ReadRowsResponse": { + "oneofs": { + "rows": { + "oneof": [ + "avroRows", + "arrowRecordBatch" + ] + } + }, + "fields": { + "avroRows": { + "type": "AvroRows", + "id": 3 + }, + "arrowRecordBatch": { + "type": "ArrowRecordBatch", + "id": 4 + }, + "rowCount": { + "type": "int64", + "id": 6 + }, + "status": { + "type": "StreamStatus", + "id": 2 + }, + "throttleStatus": { + "type": "ThrottleStatus", + "id": 5 + } + } + }, + "BatchCreateReadSessionStreamsRequest": { + "fields": { + "session": { + "type": "ReadSession", + "id": 1, + "options": { + 
"(google.api.field_behavior)": "REQUIRED" + } + }, + "requestedStreams": { + "type": "int32", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCreateReadSessionStreamsResponse": { + "fields": { + "streams": { + "rule": "repeated", + "type": "Stream", + "id": 1 + } + } + }, + "FinalizeStreamRequest": { + "fields": { + "stream": { + "type": "Stream", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "SplitReadStreamRequest": { + "fields": { + "originalStream": { + "type": "Stream", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "fraction": { + "type": "float", + "id": 2 + } + } + }, + "SplitReadStreamResponse": { + "fields": { + "primaryStream": { + "type": "Stream", + "id": 1 + }, + "remainderStream": { + "type": "Stream", + "id": 2 + } + } + }, + "TableReference": { + "fields": { + "projectId": { + "type": "string", + "id": 1 + }, + "datasetId": { + "type": "string", + "id": 2 + }, + "tableId": { + "type": "string", + "id": 3 + } + } + }, + "TableModifiers": { + "fields": { + "snapshotTime": { + "type": "google.protobuf.Timestamp", + "id": 1 + } + } + } + } + } + } + } + } + } + } + }, + "protobuf": { + "options": { + "go_package": "google.golang.org/protobuf/types/descriptorpb", + "java_package": "com.google.protobuf", + "java_outer_classname": "DescriptorProtos", + "csharp_namespace": "Google.Protobuf.Reflection", + "objc_class_prefix": "GPB", + "cc_enable_arenas": true, + "optimize_for": "SPEED" + }, + "nested": { + "FileDescriptorSet": { + "fields": { + "file": { + "rule": "repeated", + "type": "FileDescriptorProto", + "id": 1 + } + } + }, + "FileDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "package": { + "type": "string", + "id": 2 + }, + "dependency": { + "rule": "repeated", + "type": "string", + "id": 3 + }, + "publicDependency": { + "rule": "repeated", + "type": "int32", + "id": 10, + "options": { + 
"packed": false + } + }, + "weakDependency": { + "rule": "repeated", + "type": "int32", + "id": 11, + "options": { + "packed": false + } + }, + "messageType": { + "rule": "repeated", + "type": "DescriptorProto", + "id": 4 + }, + "enumType": { + "rule": "repeated", + "type": "EnumDescriptorProto", + "id": 5 + }, + "service": { + "rule": "repeated", + "type": "ServiceDescriptorProto", + "id": 6 + }, + "extension": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 7 + }, + "options": { + "type": "FileOptions", + "id": 8 + }, + "sourceCodeInfo": { + "type": "SourceCodeInfo", + "id": 9 + }, + "syntax": { + "type": "string", + "id": 12 + }, + "edition": { + "type": "string", + "id": 13 + } + } + }, + "DescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "field": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 2 + }, + "extension": { + "rule": "repeated", + "type": "FieldDescriptorProto", + "id": 6 + }, + "nestedType": { + "rule": "repeated", + "type": "DescriptorProto", + "id": 3 + }, + "enumType": { + "rule": "repeated", + "type": "EnumDescriptorProto", + "id": 4 + }, + "extensionRange": { + "rule": "repeated", + "type": "ExtensionRange", + "id": 5 + }, + "oneofDecl": { + "rule": "repeated", + "type": "OneofDescriptorProto", + "id": 8 + }, + "options": { + "type": "MessageOptions", + "id": 7 + }, + "reservedRange": { + "rule": "repeated", + "type": "ReservedRange", + "id": 9 + }, + "reservedName": { + "rule": "repeated", + "type": "string", + "id": 10 + } + }, + "nested": { + "ExtensionRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + }, + "options": { + "type": "ExtensionRangeOptions", + "id": 3 + } + } + }, + "ReservedRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + } + } + } + } + }, + "ExtensionRangeOptions": { + "fields": { + "uninterpretedOption": { + "rule": "repeated", + 
"type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "FieldDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "number": { + "type": "int32", + "id": 3 + }, + "label": { + "type": "Label", + "id": 4 + }, + "type": { + "type": "Type", + "id": 5 + }, + "typeName": { + "type": "string", + "id": 6 + }, + "extendee": { + "type": "string", + "id": 2 + }, + "defaultValue": { + "type": "string", + "id": 7 + }, + "oneofIndex": { + "type": "int32", + "id": 9 + }, + "jsonName": { + "type": "string", + "id": 10 + }, + "options": { + "type": "FieldOptions", + "id": 8 + }, + "proto3Optional": { + "type": "bool", + "id": 17 + } + }, + "nested": { + "Type": { + "values": { + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18 + } + }, + "Label": { + "values": { + "LABEL_OPTIONAL": 1, + "LABEL_REQUIRED": 2, + "LABEL_REPEATED": 3 + } + } + } + }, + "OneofDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "options": { + "type": "OneofOptions", + "id": 2 + } + } + }, + "EnumDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "value": { + "rule": "repeated", + "type": "EnumValueDescriptorProto", + "id": 2 + }, + "options": { + "type": "EnumOptions", + "id": 3 + }, + "reservedRange": { + "rule": "repeated", + "type": "EnumReservedRange", + "id": 4 + }, + "reservedName": { + "rule": "repeated", + "type": "string", + "id": 5 + } + }, + "nested": { + "EnumReservedRange": { + "fields": { + "start": { + "type": "int32", + "id": 1 + }, + "end": { + "type": "int32", + "id": 2 + } + } + } + } + }, + "EnumValueDescriptorProto": { + "fields": 
{ + "name": { + "type": "string", + "id": 1 + }, + "number": { + "type": "int32", + "id": 2 + }, + "options": { + "type": "EnumValueOptions", + "id": 3 + } + } + }, + "ServiceDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "method": { + "rule": "repeated", + "type": "MethodDescriptorProto", + "id": 2 + }, + "options": { + "type": "ServiceOptions", + "id": 3 + } + } + }, + "MethodDescriptorProto": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "inputType": { + "type": "string", + "id": 2 + }, + "outputType": { + "type": "string", + "id": 3 + }, + "options": { + "type": "MethodOptions", + "id": 4 + }, + "clientStreaming": { + "type": "bool", + "id": 5, + "options": { + "default": false + } + }, + "serverStreaming": { + "type": "bool", + "id": 6, + "options": { + "default": false + } + } + } + }, + "FileOptions": { + "fields": { + "javaPackage": { + "type": "string", + "id": 1 + }, + "javaOuterClassname": { + "type": "string", + "id": 8 + }, + "javaMultipleFiles": { + "type": "bool", + "id": 10, + "options": { + "default": false + } + }, + "javaGenerateEqualsAndHash": { + "type": "bool", + "id": 20, + "options": { + "deprecated": true + } + }, + "javaStringCheckUtf8": { + "type": "bool", + "id": 27, + "options": { + "default": false + } + }, + "optimizeFor": { + "type": "OptimizeMode", + "id": 9, + "options": { + "default": "SPEED" + } + }, + "goPackage": { + "type": "string", + "id": 11 + }, + "ccGenericServices": { + "type": "bool", + "id": 16, + "options": { + "default": false + } + }, + "javaGenericServices": { + "type": "bool", + "id": 17, + "options": { + "default": false + } + }, + "pyGenericServices": { + "type": "bool", + "id": 18, + "options": { + "default": false + } + }, + "phpGenericServices": { + "type": "bool", + "id": 42, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 23, + "options": { + "default": false + } + }, + "ccEnableArenas": { + "type": "bool", + "id": 
31, + "options": { + "default": true + } + }, + "objcClassPrefix": { + "type": "string", + "id": 36 + }, + "csharpNamespace": { + "type": "string", + "id": 37 + }, + "swiftPrefix": { + "type": "string", + "id": 39 + }, + "phpClassPrefix": { + "type": "string", + "id": 40 + }, + "phpNamespace": { + "type": "string", + "id": 41 + }, + "phpMetadataNamespace": { + "type": "string", + "id": 44 + }, + "rubyPackage": { + "type": "string", + "id": 45 + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 38, + 38 + ] + ], + "nested": { + "OptimizeMode": { + "values": { + "SPEED": 1, + "CODE_SIZE": 2, + "LITE_RUNTIME": 3 + } + } + } + }, + "MessageOptions": { + "fields": { + "messageSetWireFormat": { + "type": "bool", + "id": 1, + "options": { + "default": false + } + }, + "noStandardDescriptorAccessor": { + "type": "bool", + "id": 2, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "mapEntry": { + "type": "bool", + "id": 7 + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 4, + 4 + ], + [ + 5, + 5 + ], + [ + 6, + 6 + ], + [ + 8, + 8 + ], + [ + 9, + 9 + ] + ] + }, + "FieldOptions": { + "fields": { + "ctype": { + "type": "CType", + "id": 1, + "options": { + "default": "STRING" + } + }, + "packed": { + "type": "bool", + "id": 2 + }, + "jstype": { + "type": "JSType", + "id": 6, + "options": { + "default": "JS_NORMAL" + } + }, + "lazy": { + "type": "bool", + "id": 5, + "options": { + "default": false + } + }, + "unverifiedLazy": { + "type": "bool", + "id": 15, + "options": { + "default": false + } + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "weak": { + "type": "bool", + "id": 10, + 
"options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 4, + 4 + ] + ], + "nested": { + "CType": { + "values": { + "STRING": 0, + "CORD": 1, + "STRING_PIECE": 2 + } + }, + "JSType": { + "values": { + "JS_NORMAL": 0, + "JS_STRING": 1, + "JS_NUMBER": 2 + } + } + } + }, + "OneofOptions": { + "fields": { + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "EnumOptions": { + "fields": { + "allowAlias": { + "type": "bool", + "id": 2 + }, + "deprecated": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "reserved": [ + [ + 5, + 5 + ] + ] + }, + "EnumValueOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 1, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "ServiceOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 33, + "options": { + "default": false + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ] + }, + "MethodOptions": { + "fields": { + "deprecated": { + "type": "bool", + "id": 33, + "options": { + "default": false + } + }, + "idempotencyLevel": { + "type": "IdempotencyLevel", + "id": 34, + "options": { + "default": "IDEMPOTENCY_UNKNOWN" + } + }, + "uninterpretedOption": { + "rule": "repeated", + "type": "UninterpretedOption", + "id": 999 + } + }, + "extensions": [ + [ + 1000, + 536870911 + ] + ], + "nested": { + 
"IdempotencyLevel": { + "values": { + "IDEMPOTENCY_UNKNOWN": 0, + "NO_SIDE_EFFECTS": 1, + "IDEMPOTENT": 2 + } + } + } + }, + "UninterpretedOption": { + "fields": { + "name": { + "rule": "repeated", + "type": "NamePart", + "id": 2 + }, + "identifierValue": { + "type": "string", + "id": 3 + }, + "positiveIntValue": { + "type": "uint64", + "id": 4 + }, + "negativeIntValue": { + "type": "int64", + "id": 5 + }, + "doubleValue": { + "type": "double", + "id": 6 + }, + "stringValue": { + "type": "bytes", + "id": 7 + }, + "aggregateValue": { + "type": "string", + "id": 8 + } + }, + "nested": { + "NamePart": { + "fields": { + "namePart": { + "rule": "required", + "type": "string", + "id": 1 + }, + "isExtension": { + "rule": "required", + "type": "bool", + "id": 2 + } + } + } + } + }, + "SourceCodeInfo": { + "fields": { + "location": { + "rule": "repeated", + "type": "Location", + "id": 1 + } + }, + "nested": { + "Location": { + "fields": { + "path": { + "rule": "repeated", + "type": "int32", + "id": 1 + }, + "span": { + "rule": "repeated", + "type": "int32", + "id": 2 + }, + "leadingComments": { + "type": "string", + "id": 3 + }, + "trailingComments": { + "type": "string", + "id": 4 + }, + "leadingDetachedComments": { + "rule": "repeated", + "type": "string", + "id": 6 + } + } + } + } + }, + "GeneratedCodeInfo": { + "fields": { + "annotation": { + "rule": "repeated", + "type": "Annotation", + "id": 1 + } + }, + "nested": { + "Annotation": { + "fields": { + "path": { + "rule": "repeated", + "type": "int32", + "id": 1 + }, + "sourceFile": { + "type": "string", + "id": 2 + }, + "begin": { + "type": "int32", + "id": 3 + }, + "end": { + "type": "int32", + "id": 4 + }, + "semantic": { + "type": "Semantic", + "id": 5 + } + }, + "nested": { + "Semantic": { + "values": { + "NONE": 0, + "SET": 1, + "ALIAS": 2 + } + } + } + } + } + }, + "Timestamp": { + "fields": { + "seconds": { + "type": "int64", + "id": 1 + }, + "nanos": { + "type": "int32", + "id": 2 + } + } + }, + "DoubleValue": { 
+ "fields": { + "value": { + "type": "double", + "id": 1 + } + } + }, + "FloatValue": { + "fields": { + "value": { + "type": "float", + "id": 1 + } + } + }, + "Int64Value": { + "fields": { + "value": { + "type": "int64", + "id": 1 + } + } + }, + "UInt64Value": { + "fields": { + "value": { + "type": "uint64", + "id": 1 + } + } + }, + "Int32Value": { + "fields": { + "value": { + "type": "int32", + "id": 1 + } + } + }, + "UInt32Value": { + "fields": { + "value": { + "type": "uint32", + "id": 1 + } + } + }, + "BoolValue": { + "fields": { + "value": { + "type": "bool", + "id": 1 + } + } + }, + "StringValue": { + "fields": { + "value": { + "type": "string", + "id": 1 + } + } + }, + "BytesValue": { + "fields": { + "value": { + "type": "bytes", + "id": 1 + } + } + }, + "Any": { + "fields": { + "type_url": { + "type": "string", + "id": 1 + }, + "value": { + "type": "bytes", + "id": 2 + } + } + }, + "Empty": { + "fields": {} + } + } + }, + "api": { + "options": { + "go_package": "google.golang.org/genproto/googleapis/api/annotations;annotations", + "java_multiple_files": true, + "java_outer_classname": "ResourceProto", + "java_package": "com.google.api", + "objc_class_prefix": "GAPI", + "cc_enable_arenas": true + }, + "nested": { + "http": { + "type": "HttpRule", + "id": 72295728, + "extend": "google.protobuf.MethodOptions" + }, + "Http": { + "fields": { + "rules": { + "rule": "repeated", + "type": "HttpRule", + "id": 1 + }, + "fullyDecodeReservedExpansion": { + "type": "bool", + "id": 2 + } + } + }, + "HttpRule": { + "oneofs": { + "pattern": { + "oneof": [ + "get", + "put", + "post", + "delete", + "patch", + "custom" + ] + } + }, + "fields": { + "selector": { + "type": "string", + "id": 1 + }, + "get": { + "type": "string", + "id": 2 + }, + "put": { + "type": "string", + "id": 3 + }, + "post": { + "type": "string", + "id": 4 + }, + "delete": { + "type": "string", + "id": 5 + }, + "patch": { + "type": "string", + "id": 6 + }, + "custom": { + "type": "CustomHttpPattern", + 
"id": 8 + }, + "body": { + "type": "string", + "id": 7 + }, + "responseBody": { + "type": "string", + "id": 12 + }, + "additionalBindings": { + "rule": "repeated", + "type": "HttpRule", + "id": 11 + } + } + }, + "CustomHttpPattern": { + "fields": { + "kind": { + "type": "string", + "id": 1 + }, + "path": { + "type": "string", + "id": 2 + } + } + }, + "methodSignature": { + "rule": "repeated", + "type": "string", + "id": 1051, + "extend": "google.protobuf.MethodOptions" + }, + "defaultHost": { + "type": "string", + "id": 1049, + "extend": "google.protobuf.ServiceOptions" + }, + "oauthScopes": { + "type": "string", + "id": 1050, + "extend": "google.protobuf.ServiceOptions" + }, + "fieldBehavior": { + "rule": "repeated", + "type": "google.api.FieldBehavior", + "id": 1052, + "extend": "google.protobuf.FieldOptions" + }, + "FieldBehavior": { + "values": { + "FIELD_BEHAVIOR_UNSPECIFIED": 0, + "OPTIONAL": 1, + "REQUIRED": 2, + "OUTPUT_ONLY": 3, + "INPUT_ONLY": 4, + "IMMUTABLE": 5, + "UNORDERED_LIST": 6, + "NON_EMPTY_DEFAULT": 7 + } + }, + "resourceReference": { + "type": "google.api.ResourceReference", + "id": 1055, + "extend": "google.protobuf.FieldOptions" + }, + "resourceDefinition": { + "rule": "repeated", + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.FileOptions" + }, + "resource": { + "type": "google.api.ResourceDescriptor", + "id": 1053, + "extend": "google.protobuf.MessageOptions" + }, + "ResourceDescriptor": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "pattern": { + "rule": "repeated", + "type": "string", + "id": 2 + }, + "nameField": { + "type": "string", + "id": 3 + }, + "history": { + "type": "History", + "id": 4 + }, + "plural": { + "type": "string", + "id": 5 + }, + "singular": { + "type": "string", + "id": 6 + }, + "style": { + "rule": "repeated", + "type": "Style", + "id": 10 + } + }, + "nested": { + "History": { + "values": { + "HISTORY_UNSPECIFIED": 0, + "ORIGINALLY_SINGLE_PATTERN": 1, + 
"FUTURE_MULTI_PATTERN": 2 + } + }, + "Style": { + "values": { + "STYLE_UNSPECIFIED": 0, + "DECLARATIVE_FRIENDLY": 1 + } + } + } + }, + "ResourceReference": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "childType": { + "type": "string", + "id": 2 + } + } + } + } + }, + "rpc": { + "options": { + "cc_enable_arenas": true, + "go_package": "google.golang.org/genproto/googleapis/rpc/status;status", + "java_multiple_files": true, + "java_outer_classname": "StatusProto", + "java_package": "com.google.rpc", + "objc_class_prefix": "RPC" + }, + "nested": { + "Status": { + "fields": { + "code": { + "type": "int32", + "id": 1 + }, + "message": { + "type": "string", + "id": 2 + }, + "details": { + "rule": "repeated", + "type": "google.protobuf.Any", + "id": 3 + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts new file mode 100644 index 00000000000..8d6b763438d --- /dev/null +++ b/handwritten/bigquery-storage/src/index.ts @@ -0,0 +1,38 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as v1 from './v1'; +import * as v1beta1 from './v1beta1'; +const BigQueryReadClient = v1.BigQueryReadClient; +type BigQueryReadClient = v1.BigQueryReadClient; +const BigQueryWriteClient = v1.BigQueryWriteClient; +type BigQueryWriteClient = v1.BigQueryWriteClient; +const BigQueryStorageClient = v1beta1.BigQueryStorageClient; +type BigQueryStorageClient = v1beta1.BigQueryStorageClient; +export { + v1, + BigQueryReadClient, + v1beta1, + BigQueryStorageClient, + BigQueryWriteClient, +}; +// For compatibility with JavaScript libraries we need to provide this default export: +// tslint:disable-next-line no-default-export +export default {v1, BigQueryReadClient, BigQueryWriteClient}; +import * as protos from '../protos/protos'; +export {protos}; diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts new file mode 100644 index 00000000000..1a78d38a741 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -0,0 +1,938 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +/* global window */ +import type * as gax from 'google-gax'; +import type { + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import {PassThrough} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/big_query_read_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './big_query_read_client_config.json'; +const version = require('../../../package.json').version; + +/** + * BigQuery Read API. + * + * The Read API can be used to read data from BigQuery. + * @class + * @memberof v1 + */ +export class BigQueryReadClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + bigQueryReadStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryReadClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. 
+ * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. 
Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryReadClient({fallback: 'rest'}, gax); + * ``` + */ + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof BigQueryReadClient; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. 
+ this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + readSessionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}' + ), + writeStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. 
+ this.descriptors.stream = { + readRows: new this._gaxModule.StreamDescriptor( + this._gaxModule.StreamType.SERVER_STREAMING, + opts.fallback === 'rest' + ), + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1.BigQueryRead', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryReadStub) { + return this.bigQueryReadStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1.BigQueryRead. + this.bigQueryReadStub = this._gaxGrpc.createStub( + this._opts.fallback + ? 
(this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1.BigQueryRead' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, + this._opts, + this._providedCustomServicePath + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const bigQueryReadStubMethods = [ + 'createReadSession', + 'readRows', + 'splitReadStream', + ]; + for (const methodName of bigQueryReadStubMethods) { + const callPromise = this.bigQueryReadStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new this._gaxModule.GoogleError( + 'The client has already been closed.' + ) + ); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.bigQueryReadStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. 
+ */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + /** + * Creates a new read session. A read session divides the contents of a + * BigQuery table into one or more streams, which can then be used to read + * data from the table. The read session also specifies properties of the + * data to be read, such as a list of columns or a push-down filter describing + * the rows to be returned. + * + * A particular row can be read by at most one stream. When the caller has + * reached the end of each stream in the session, then all the data in the + * table has been read. + * + * Data is assigned to each stream such that roughly the same number of + * rows can be read from each stream. Because the server-side unit for + * assigning data is collections of rows, the API does not guarantee that + * each stream will return the same number or rows. Additionally, the + * limits are enforced based on the number of pre-filtered rows, so some + * filters can lead to lopsided assignments. + * + * Read sessions automatically expire 6 hours after they are created and do + * not require manual clean-up by the caller. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The request project that owns the session, in the form of + * `projects/{project_id}`. + * @param {google.cloud.bigquery.storage.v1.ReadSession} request.readSession + * Required. Session to be created. + * @param {number} request.maxStreamCount + * Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table. + * There is a default system max limit of 1,000. + * + * This must be greater than or equal to preferred_min_stream_count. + * Typically, clients should either leave this unset to let the system to + * determine an upper bound OR set this a size for the maximum "units of work" + * it can gracefully handle. + * @param {number} request.preferredMinStreamCount + * The minimum preferred stream count. This parameter can be used to inform + * the service that there is a desired lower bound on the number of streams. + * This is typically a target parallelism of the client (e.g. a Spark + * cluster with N-workers would set this to a low multiple of N to ensure + * good cluster utilization). + * + * The system will make a best effort to provide at least this number of + * streams, but in some cases might provide less. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1.ReadSession}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_read.create_read_session.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async + */ + createReadSession( + request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + 
callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'read_session.table': request.readSession!.table ?? '', + }); + this.initialize(); + return this.innerApiCalls.createReadSession(request, options, callback); + } + /** + * Splits a given `ReadStream` into two `ReadStream` objects. These + * `ReadStream` objects are referred to as the primary and the residual + * streams of the split. The original `ReadStream` can still be read from in + * the same manner as before. Both of the returned `ReadStream` objects can + * also be read from, and the rows returned by both child streams will be + * the same as the rows read from the original stream. + * + * Moreover, the two child streams will be allocated back-to-back in the + * original `ReadStream`. Concretely, it is guaranteed that for streams + * original, primary, and residual, that original[0-j] = primary[0-j] and + * original[j-n] = residual[0-m] once the streams have been read to + * completion. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to split. + * @param {number} request.fraction + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to a data storage boundary on the server side. 
+ * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/big_query_read.split_read_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async + */ + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + 
{} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.splitReadStream(request, options, callback); + } + + /** + * Reads rows from the stream in the format prescribed by the ReadSession. + * Each response contains one or more table rows, up to a maximum of 100 MiB + * per response; read requests which attempt to read individual rows larger + * than 100 MiB will fail. + * + * Each request also returns a set of stream statistics reflecting the current + * state of the stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.readStream + * Required. Stream to read rows from. + * @param {number} request.offset + * The offset requested must be less than the last row read from Read. + * Requesting a larger offset is undefined. If not specified, start reading + * from offset zero. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Stream} + * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * for more details and examples. + * @example include:samples/generated/v1/big_query_read.read_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryRead_ReadRows_async + */ + readRows( + request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, + options?: CallOptions + ): gax.CancellableStream { + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + read_stream: request.readStream ?? '', + }); + this.initialize(); + return this.innerApiCalls.readRows(request, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified readSession resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. 
+ */ + readSessionPath(project: string, location: string, session: string) { + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. + */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string + ) { + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. 
+ * @returns {string} A string representing the project. + */ + matchProjectFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. + */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Return a fully-qualified writeStream resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @param {string} stream + * @returns {string} Resource name string. + */ + writeStreamPath( + project: string, + dataset: string, + table: string, + stream: string + ) { + return this.pathTemplates.writeStreamPathTemplate.render({ + project: project, + dataset: dataset, + table: table, + stream: stream, + }); + } + + /** + * Parse the project from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .project; + } + + /** + * Parse the dataset from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the dataset. 
+ */ + matchDatasetFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .dataset; + } + + /** + * Parse the table from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the table. + */ + matchTableFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .table; + } + + /** + * Parse the stream from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .stream; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.bigQueryReadStub && !this._terminated) { + return this.bigQueryReadStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json new file mode 100644 index 00000000000..42b2735b9fe --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client_config.json @@ -0,0 +1,44 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1.BigQueryRead": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateReadSession": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ReadRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "SplitReadStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json new file mode 100644 index 00000000000..d730716117c --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_read_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", + "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1/avro.proto", + 
"../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", + "../../protos/google/cloud/bigquery/storage/v1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1/stream.proto", + "../../protos/google/cloud/bigquery/storage/v1/table.proto" +] diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts new file mode 100644 index 00000000000..6e002a67e7c --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -0,0 +1,1213 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type { + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import {PassThrough} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/big_query_write_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ +import * as gapicConfig from './big_query_write_client_config.json'; +const version = require('../../../package.json').version; + +/** + * BigQuery Write API. + * + * The Write API can be used to write data to BigQuery. + * + * For supplementary information about the Write API, see: + * https://cloud.google.com/bigquery/docs/write-api + * @class + * @memberof v1 + */ +export class BigQueryWriteClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + bigQueryWriteStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryWriteClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. 
+ * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryWriteClient({fallback: 'rest'}, gax); + * ``` + */ + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof BigQueryWriteClient; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? 
{}; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + readSessionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}' + ), + writeStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + appendRows: new this._gaxModule.StreamDescriptor( + this._gaxModule.StreamType.BIDI_STREAMING, + opts.fallback === 'rest' + ), + }; + + // Put together the default options sent with requests. 
+ this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1.BigQueryWrite', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryWriteStub) { + return this.bigQueryWriteStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1.BigQueryWrite. + this.bigQueryWriteStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1.BigQueryWrite' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryWrite, + this._opts, + this._providedCustomServicePath + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const bigQueryWriteStubMethods = [ + 'createWriteStream', + 'appendRows', + 'getWriteStream', + 'finalizeWriteStream', + 'batchCommitWriteStreams', + 'flushRows', + ]; + for (const methodName of bigQueryWriteStubMethods) { + const callPromise = this.bigQueryWriteStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new this._gaxModule.GoogleError( + 'The client has already been closed.' + ) + ); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.bigQueryWriteStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. 
+ */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/bigquery.insertdata', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + /** + * Creates a write stream to the given table. + * Additionally, every table has a special stream named '_default' + * to which data can be written. This stream doesn't need to be created using + * CreateWriteStream. It is a stream that can be used simultaneously by any + * number of clients. Data written to this stream is considered committed as + * soon as an acknowledgement is received. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {google.cloud.bigquery.storage.v1.WriteStream} request.writeStream + * Required. Stream to be created. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.create_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async + */ + createWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + createWriteStream( + request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createWriteStream( + request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + 
callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.createWriteStream(request, options, callback); + } + /** + * Gets information about a write stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to get, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {google.cloud.bigquery.storage.v1.WriteStreamView} request.view + * Indicates whether to get full or partial view of the WriteStream. If + * not set, view returned will be basic. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.get_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async + */ + getWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + getWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + getWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + getWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } 
else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.getWriteStream(request, options, callback); + } + /** + * Finalize a write stream so that no new data can be appended to the + * stream. Finalize is not supported on the '_default' stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. Name of the stream to finalize, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.finalize_write_stream.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async + */ + finalizeWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | undefined + ), + {} | undefined + ] + >; + finalizeWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + finalizeWriteStream( + request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + finalizeWriteStream( + request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request 
= request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.finalizeWriteStream(request, options, callback); + } + /** + * Atomically commits a group of `PENDING` streams that belong to the same + * `parent` table. + * + * Streams must be finalized before commit and cannot be committed multiple + * times. Once a stream is committed, data in the stream becomes available + * for read operations. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Parent table that all the streams should belong to, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {string[]} request.writeStreams + * Required. The group of streams that will be committed atomically. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/big_query_write.batch_commit_write_streams.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async + */ + batchCommitWriteStreams( + request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | undefined + ), + {} | undefined + ] + >; + batchCommitWriteStreams( + request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCommitWriteStreams( + request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCommitWriteStreams( + request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + ( + | 
protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.batchCommitWriteStreams( + request, + options, + callback + ); + } + /** + * Flushes rows to a BUFFERED stream. + * + * If users are appending rows to BUFFERED stream, flush operation is + * required in order for the rows to become available for reading. A + * Flush operation flushes up to any previously flushed offset in a BUFFERED + * stream, to the offset specified in the request. + * + * Flush is not supported on the _default stream, since it is not BUFFERED. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.writeStream + * Required. The stream that is the target of the flush operation. + * @param {google.protobuf.Int64Value} request.offset + * Ending offset of the flush operation. Rows before this offset(including + * this offset) will be flushed. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. 
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/big_query_write.flush_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async + */ + flushRows( + request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, + {} | undefined + ] + >; + flushRows( + request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + flushRows( + request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + flushRows( + request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 
'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + write_stream: request.writeStream ?? '', + }); + this.initialize(); + return this.innerApiCalls.flushRows(request, options, callback); + } + + /** + * Appends data to the given stream. + * + * If `offset` is specified, the `offset` is checked against the end of + * stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an + * attempt is made to append to an offset beyond the current end of the stream + * or `ALREADY_EXISTS` if user provides an `offset` that has already been + * written to. User can retry with adjusted offset within the same RPC + * connection. If `offset` is not specified, append happens at the end of the + * stream. + * + * The response contains an optional offset at which the append + * happened. No offset information will be returned for appends to a + * default stream. + * + * Responses are received in the same order in which requests are sent. + * There will be one response for each successful inserted request. Responses + * may optionally embed error information if the originating AppendRequest was + * not successfully processed. + * + * The specifics of when successfully appended data is made visible to the + * table are governed by the type of stream: + * + * * For COMMITTED streams (which includes the default stream), data is + * visible immediately upon successful append. + * + * * For BUFFERED streams, data is made visible via a subsequent `FlushRows` + * rpc which advances a cursor to a newer offset in the stream. 
+ * + * * For PENDING streams, data is not made visible until the stream itself is + * finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly + * committed via the `BatchCommitWriteStreams` rpc. + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing [AppendRowsRequest]{@link google.cloud.bigquery.storage.v1.AppendRowsRequest} for write() method, and + * will emit objects representing [AppendRowsResponse]{@link google.cloud.bigquery.storage.v1.AppendRowsResponse} on 'data' event asynchronously. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) + * for more details and examples. + * @example include:samples/generated/v1/big_query_write.append_rows.js + * region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async + */ + appendRows(options?: CallOptions): gax.CancellableStream { + this.initialize(); + return this.innerApiCalls.appendRows(null, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified readSession resource name string. 
+ * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. + */ + readSessionPath(project: string, location: string, session: string) { + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. 
+ */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string + ) { + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. 
+ */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. + */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Return a fully-qualified writeStream resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @param {string} stream + * @returns {string} Resource name string. + */ + writeStreamPath( + project: string, + dataset: string, + table: string, + stream: string + ) { + return this.pathTemplates.writeStreamPathTemplate.render({ + project: project, + dataset: dataset, + table: table, + stream: stream, + }); + } + + /** + * Parse the project from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .project; + } + + /** + * Parse the dataset from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .dataset; + } + + /** + * Parse the table from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the table. + */ + matchTableFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .table; + } + + /** + * Parse the stream from WriteStream resource. + * + * @param {string} writeStreamName + * A fully-qualified path representing WriteStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromWriteStreamName(writeStreamName: string) { + return this.pathTemplates.writeStreamPathTemplate.match(writeStreamName) + .stream; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.bigQueryWriteStub && !this._terminated) { + return this.bigQueryWriteStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json new file mode 100644 index 00000000000..4b7f4b0657b --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1.BigQueryWrite": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "deadline_exceeded_resource_exhausted_unavailable": [ + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ec82364a95d03873ac5f61710bb6b9b42e40f31d": { + "initial_retry_delay_millis": 10000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 120000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateWriteStream": { + "timeout_millis": 1200000, + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", + "retry_params_name": "ec82364a95d03873ac5f61710bb6b9b42e40f31d" + }, + "AppendRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "GetWriteStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FinalizeWriteStream": { + "timeout_millis": 600000, + 
"retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "BatchCommitWriteStreams": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FlushRows": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json new file mode 100644 index 00000000000..d730716117c --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/big_query_write_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1/annotations.proto", + "../../protos/google/cloud/bigquery/storage/v1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1/avro.proto", + "../../protos/google/cloud/bigquery/storage/v1/protobuf.proto", + "../../protos/google/cloud/bigquery/storage/v1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1/stream.proto", + "../../protos/google/cloud/bigquery/storage/v1/table.proto" +] diff --git a/handwritten/bigquery-storage/src/v1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1/gapic_metadata.json new file mode 100644 index 00000000000..f52c2dae123 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/gapic_metadata.json @@ -0,0 +1,117 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1", + "libraryPackage": "@google-cloud/bigquery-storage", + "services": { + "BigQueryRead": { + "clients": { + "grpc": { + "libraryClient": "BigQueryReadClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + }, + "ReadRows": { + "methods": [ + "readRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": 
"BigQueryReadClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + } + } + } + } + }, + "BigQueryWrite": { + "clients": { + "grpc": { + "libraryClient": "BigQueryWriteClient", + "rpcs": { + "CreateWriteStream": { + "methods": [ + "createWriteStream" + ] + }, + "GetWriteStream": { + "methods": [ + "getWriteStream" + ] + }, + "FinalizeWriteStream": { + "methods": [ + "finalizeWriteStream" + ] + }, + "BatchCommitWriteStreams": { + "methods": [ + "batchCommitWriteStreams" + ] + }, + "FlushRows": { + "methods": [ + "flushRows" + ] + }, + "AppendRows": { + "methods": [ + "appendRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BigQueryWriteClient", + "rpcs": { + "CreateWriteStream": { + "methods": [ + "createWriteStream" + ] + }, + "GetWriteStream": { + "methods": [ + "getWriteStream" + ] + }, + "FinalizeWriteStream": { + "methods": [ + "finalizeWriteStream" + ] + }, + "BatchCommitWriteStreams": { + "methods": [ + "batchCommitWriteStreams" + ] + }, + "FlushRows": { + "methods": [ + "flushRows" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts new file mode 100644 index 00000000000..f3bacd94214 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -0,0 +1,20 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {BigQueryReadClient} from './big_query_read_client'; +export {BigQueryWriteClient} from './big_query_write_client'; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts new file mode 100644 index 00000000000..19120696d61 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -0,0 +1,1004 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type { + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import {PassThrough} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1beta1/big_query_storage_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ +import * as gapicConfig from './big_query_storage_client_config.json'; +const version = require('../../../package.json').version; + +/** + * BigQuery storage API. + * + * The BigQuery storage API can be used to read data stored in BigQuery. + * @class + * @memberof v1beta1 + */ +export class BigQueryStorageClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + bigQueryStorageStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BigQueryStorageClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. 
+ * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new BigQueryStorageClient({fallback: 'rest'}, gax); + * ``` + */ + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof BigQueryStorageClient; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = + opts?.fallback ?? 
+ (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest') { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. 
+ this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + readSessionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}' + ), + streamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/streams/{stream}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + readRows: new this._gaxModule.StreamDescriptor( + this._gaxModule.StreamType.SERVER_STREAMING, + opts.fallback === 'rest' + ), + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. 
+ * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.bigQueryStorageStub) { + return this.bigQueryStorageStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1beta1.BigQueryStorage. + this.bigQueryStorageStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1beta1 + .BigQueryStorage, + this._opts, + this._providedCustomServicePath + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const bigQueryStorageStubMethods = [ + 'createReadSession', + 'readRows', + 'batchCreateReadSessionStreams', + 'finalizeStream', + 'splitReadStream', + ]; + for (const methodName of bigQueryStorageStubMethods) { + const callPromise = this.bigQueryStorageStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new this._gaxModule.GoogleError( + 'The client has already been closed.' 
+ ) + ); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.bigQueryStorageStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerystorage.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + /** + * Creates a new read session. 
A read session divides the contents of a + * BigQuery table into one or more streams, which can then be used to read + * data from the table. The read session also specifies properties of the + * data to be read, such as a list of columns or a push-down filter describing + * the rows to be returned. + * + * A particular row can be read by at most one stream. When the caller has + * reached the end of each stream in the session, then all the data in the + * table has been read. + * + * Read sessions automatically expire 24 hours after they are created and do + * not require manual clean-up by the caller. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.TableReference} request.tableReference + * Required. Reference to the table to read. + * @param {string} request.parent + * Required. String of the form `projects/{project_id}` indicating the + * project this ReadSession is associated with. This is the project that will + * be billed for usage. + * @param {google.cloud.bigquery.storage.v1beta1.TableModifiers} request.tableModifiers + * Any modifiers to the Table (e.g. snapshot timestamp). + * @param {number} request.requestedStreams + * Initial number of streams. If unset or 0, we will + * provide a value of streams so as to produce reasonable throughput. Must be + * non-negative. The number of streams may be lower than the requested number, + * depending on the amount parallelism that is reasonable for the table and + * the maximum amount of parallelism allowed by the system. + * + * Streams must be read starting from offset 0. + * @param {google.cloud.bigquery.storage.v1beta1.TableReadOptions} request.readOptions + * Read options for this session (e.g. column selection, filters). + * @param {google.cloud.bigquery.storage.v1beta1.DataFormat} request.format + * Data output format. Currently default to Avro. 
+ * @param {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} request.shardingStrategy + * The strategy to use for distributing data among multiple streams. Currently + * defaults to liquid sharding. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1beta1.ReadSession}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1beta1/big_query_storage.create_read_session.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async + */ + createReadSession( + request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + >; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): void; + createReadSession( + request?: 
protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'table_reference.project_id': request.tableReference!.projectId ?? '', + 'table_reference.dataset_id': request.tableReference!.datasetId ?? '', + }); + this.initialize(); + return this.innerApiCalls.createReadSession(request, options, callback); + } + /** + * Creates additional streams for a ReadSession. This API can be used to + * dynamically adjust the parallelism of a batch processing task upwards by + * adding additional workers. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.ReadSession} request.session + * Required. Must be a non-expired session obtained from a call to + * CreateReadSession. Only the name field needs to be set. + * @param {number} request.requestedStreams + * Required. 
Number of new streams requested. Must be positive. + * Number of added streams may be less than this, see CreateReadSessionRequest + * for more information. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async + */ + batchCreateReadSessionStreams( + request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined + ), + {} | undefined + ] + >; + batchCreateReadSessionStreams( + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCreateReadSessionStreams( + request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | 
protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCreateReadSessionStreams( + request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'session.name': request.session!.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.batchCreateReadSessionStreams( + request, + options, + callback + ); + } + /** + * Triggers the graceful termination of a single stream in a ReadSession. This + * API can be used to dynamically adjust the parallelism of a batch processing + * task downwards without losing data. 
+ * + * This API does not delete the stream -- it remains visible in the + * ReadSession, and any data processed by the stream is not released to other + * streams. However, no additional data will be assigned to the stream once + * this call completes. Callers must continue reading data on the stream until + * the end of the stream is reached so that data which has already been + * assigned to the stream will be processed. + * + * This method will return an error if there are no other live streams + * in the Session, or if SplitReadStream() has been called on the given + * Stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.stream + * Required. Stream to finalize. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1beta1/big_query_storage.finalize_stream.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async + */ + finalizeStream( + request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined + ), + {} | undefined + ] + >; + finalizeStream( + request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + finalizeStream( + request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + finalizeStream( + request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options 
|| {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'stream.name': request.stream!.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.finalizeStream(request, options, callback); + } + /** + * Splits a given read stream into two Streams. These streams are referred to + * as the primary and the residual of the split. The original stream can still + * be read from in the same manner as before. Both of the returned streams can + * also be read from, and the total rows return by both child streams will be + * the same as the rows read from the original stream. + * + * Moreover, the two child streams will be allocated back to back in the + * original Stream. Concretely, it is guaranteed that for streams Original, + * Primary, and Residual, that Original[0-j] = Primary[0-j] and + * Original[j-n] = Residual[0-m] once the streams have been read to + * completion. + * + * This method is guaranteed to be idempotent. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.Stream} request.originalStream + * Required. Stream to split. + * @param {number} request.fraction + * A value in the range (0.0, 1.0) that specifies the fractional point at + * which the original stream should be split. The actual split point is + * evaluated on pre-filtered rows, so if a filter is provided, then there is + * no guarantee that the division of the rows between the new child streams + * will be proportional to this fractional value. Additionally, because the + * server-side unit for assigning data is collections of rows, this fraction + * will always map to to a data storage boundary on the server side. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1beta1/big_query_storage.split_read_stream.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async + */ + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + >; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): void; + splitReadStream( + request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | 
protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'original_stream.name': request.originalStream!.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.splitReadStream(request, options, callback); + } + + /** + * Reads rows from the table in the format prescribed by the read session. + * Each response contains one or more table rows, up to a maximum of 10 MiB + * per response; read requests which attempt to read individual rows larger + * than this will fail. + * + * Each request also returns a set of stream statistics reflecting the + * estimated total number of rows in the read stream. This number is computed + * based on the total table size and the number of active streams in the read + * session, and may change as other streams continue to read data. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.storage.v1beta1.StreamPosition} request.readPosition + * Required. 
Identifier of the position in the stream to start reading from. + * The offset requested must be less than the last row read from ReadRows. + * Requesting a larger offset is undefined. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * for more details and examples. + * @example include:samples/generated/v1beta1/big_query_storage.read_rows.js + * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async + */ + readRows( + request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, + options?: CallOptions + ): gax.CancellableStream { + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + 'read_position.stream.name': request.readPosition!.stream!.name ?? '', + }); + this.initialize(); + return this.innerApiCalls.readRows(request, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project: string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified readSession resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @returns {string} Resource name string. + */ + readSessionPath(project: string, location: string, session: string) { + return this.pathTemplates.readSessionPathTemplate.render({ + project: project, + location: location, + session: session, + }); + } + + /** + * Parse the project from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .project; + } + + /** + * Parse the location from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .location; + } + + /** + * Parse the session from ReadSession resource. + * + * @param {string} readSessionName + * A fully-qualified path representing ReadSession resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadSessionName(readSessionName: string) { + return this.pathTemplates.readSessionPathTemplate.match(readSessionName) + .session; + } + + /** + * Return a fully-qualified stream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} stream + * @returns {string} Resource name string. 
+ */ + streamPath(project: string, location: string, stream: string) { + return this.pathTemplates.streamPathTemplate.render({ + project: project, + location: location, + stream: stream, + }); + } + + /** + * Parse the project from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromStreamName(streamName: string) { + return this.pathTemplates.streamPathTemplate.match(streamName).project; + } + + /** + * Parse the location from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromStreamName(streamName: string) { + return this.pathTemplates.streamPathTemplate.match(streamName).location; + } + + /** + * Parse the stream from Stream resource. + * + * @param {string} streamName + * A fully-qualified path representing Stream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromStreamName(streamName: string) { + return this.pathTemplates.streamPathTemplate.match(streamName).stream; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.bigQueryStorageStub && !this._terminated) { + return this.bigQueryStorageStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json new file mode 100644 index 00000000000..003cb084ff8 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client_config.json @@ -0,0 +1,54 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1beta1.BigQueryStorage": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateReadSession": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ReadRows": { + "timeout_millis": 86400000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchCreateReadSessionStreams": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "FinalizeStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SplitReadStream": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json new file mode 100644 index 00000000000..0b8010758a6 --- /dev/null +++ 
b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1beta1/arrow.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/avro.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/read_options.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/storage.proto", + "../../protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto" +] diff --git a/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json b/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json new file mode 100644 index 00000000000..00d888bf605 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/gapic_metadata.json @@ -0,0 +1,68 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1beta1", + "libraryPackage": "@google-cloud/bigquery-storage", + "services": { + "BigQueryStorage": { + "clients": { + "grpc": { + "libraryClient": "BigQueryStorageClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "BatchCreateReadSessionStreams": { + "methods": [ + "batchCreateReadSessionStreams" + ] + }, + "FinalizeStream": { + "methods": [ + "finalizeStream" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + }, + "ReadRows": { + "methods": [ + "readRows" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BigQueryStorageClient", + "rpcs": { + "CreateReadSession": { + "methods": [ + "createReadSession" + ] + }, + "BatchCreateReadSessionStreams": { + "methods": [ + "batchCreateReadSessionStreams" + ] + }, + "FinalizeStream": { + "methods": [ + "finalizeStream" + ] + }, + "SplitReadStream": { + "methods": [ + "splitReadStream" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts new 
file mode 100644 index 00000000000..dc3afed8ea7 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {BigQueryStorageClient} from './big_query_storage_client'; diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js new file mode 100644 index 00000000000..d59c13c62cd --- /dev/null +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const storage = require('@google-cloud/bigquery-storage'); + +function main() { + const bigQueryReadClient = new storage.BigQueryReadClient(); + const bigQueryWriteClient = new storage.BigQueryWriteClient(); +} + +main(); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts new file mode 100644 index 00000000000..6fd6e3ca7ee --- /dev/null +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,41 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import { + BigQueryReadClient, + BigQueryWriteClient, +} from '@google-cloud/bigquery-storage'; + +// check that the client class type name can be used +function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { + client.close(); +} +function doStuffWithBigQueryWriteClient(client: BigQueryWriteClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const bigQueryReadClient = new BigQueryReadClient(); + doStuffWithBigQueryReadClient(bigQueryReadClient); + // check that the client instance can be created + const bigQueryWriteClient = new BigQueryWriteClient(); + doStuffWithBigQueryWriteClient(bigQueryWriteClient); +} + +main(); diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts new file mode 100644 index 00000000000..6dd1eaadafa --- /dev/null +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -0,0 +1,51 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + it('TypeScript code', async function () { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync( + './system-test/fixtures/sample/src/index.ts' + ).toString(), + }, + }; + await packNTest(options); + }); + + it('JavaScript code', async function () { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync( + './system-test/fixtures/sample/src/index.js' + ).toString(), + }, + }; + await packNTest(options); + }); +}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts new file mode 100644 index 00000000000..cd2a50bb2c1 --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -0,0 +1,896 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigqueryreadModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubServerStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // write something to the stream to trigger transformStub and send the response back to the client + setImmediate(() => { + mockStream.write({}); + }); + setImmediate(() => { + mockStream.end(); + }); + return sinon.stub().returns(mockStream); +} + +describe('v1.BigQueryReadClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigqueryreadModule.v1.BigQueryReadClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + await client.initialize(); + assert(client.bigQueryReadStub); + }); + + it('has close method for the initialized client', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryReadStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized 
client', done => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryReadStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createReadSession', () => { + it('invokes createReadSession without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + 
request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); + const [response] = await client.createReadSession(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession without error using callback', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadSession() + ); + client.innerApiCalls.createReadSession = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createReadSession( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IReadSession | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await 
promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.createReadSession = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.createReadSession(request), expectedError); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + ); + request.readSession ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', + ['readSession', 'table'] + ); + request.readSession.table = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createReadSession(request), expectedError); + }); + }); + + describe('splitReadStream', () => { + it('invokes splitReadStream without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); + const [response] = await client.splitReadStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream without error using callback', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', 
private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.splitReadStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedError = new 
Error('expected'); + client.innerApiCalls.splitReadStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.splitReadStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.splitReadStream(request), expectedError); + }); + }); + + describe('readRows', () => { + it('invokes readRows without error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; + const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() + ); + 
client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes readRows with error', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; + const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.readRows = stubServerStreamingCall( + undefined, + expectedError + ); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + 
assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes readRows with closed client', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + }); + }); + + describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + 
.calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + 
(client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readStream', () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = 
client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('table', () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('writeStream', () => { + const fakePath = '/rendered/path/writeStream'; + const expectedParameters = 
{ + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + stream: 'streamValue', + }; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.writeStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.writeStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('writeStreamPath', () => { + const result = client.writeStreamPath( + 'projectValue', + 'datasetValue', + 'tableValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromWriteStreamName', () => { + const result = client.matchProjectFromWriteStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromWriteStreamName', () => { + const result = client.matchDatasetFromWriteStreamName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromWriteStreamName', () => { + const result = client.matchTableFromWriteStreamName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromWriteStreamName', () => { + const result = client.matchStreamFromWriteStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); diff --git 
a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts new file mode 100644 index 00000000000..3623068f487 --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -0,0 +1,1055 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigquerystorageModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubServerStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // write something to the stream to trigger transformStub and send the response back to the client + setImmediate(() => { + mockStream.write({}); + }); + setImmediate(() => { + mockStream.end(); + }); + return sinon.stub().returns(mockStream); +} + +describe('v1beta1.BigQueryStorageClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigquerystorageModule.v1beta1.BigQueryStorageClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + await client.initialize(); + assert(client.bigQueryStorageStub); + }); + + it('has close method for the initialized client', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryStorageStub); + 
client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryStorageStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createReadSession', () => { + it('invokes createReadSession without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference ??= {}; + const 
defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + ); + client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); + const [response] = await client.createReadSession(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + 
'.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + ); + client.innerApiCalls.createReadSession = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createReadSession( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.IReadSession | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + 
'.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedError = new Error('expected'); + client.innerApiCalls.createReadSession = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.createReadSession(request), expectedError); + const actualRequest = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createReadSession as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createReadSession with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + ); + request.tableReference ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'projectId'] + ); + request.tableReference.projectId = defaultValue1; + request.tableReference ??= {}; + const defaultValue2 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', + ['tableReference', 'datasetId'] + ); + request.tableReference.datasetId = defaultValue2; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createReadSession(request), expectedError); + }); + }); + + describe('batchCreateReadSessionStreams', () => { + it('invokes 
batchCreateReadSessionStreams without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + ); + client.innerApiCalls.batchCreateReadSessionStreams = + stubSimpleCall(expectedResponse); + const [response] = await client.batchCreateReadSessionStreams(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateReadSessionStreams without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + 
'.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + ); + client.innerApiCalls.batchCreateReadSessionStreams = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchCreateReadSessionStreams( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateReadSessionStreams with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedError 
= new Error('expected'); + client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchCreateReadSessionStreams(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateReadSessionStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateReadSessionStreams with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + ); + request.session ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', + ['session', 'name'] + ); + request.session.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchCreateReadSessionStreams(request), + expectedError + ); + }); + }); + + describe('finalizeStream', () => { + it('invokes finalizeStream without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + 
'.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.finalizeStream = stubSimpleCall(expectedResponse); + const [response] = await client.finalizeStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeStream without error using callback', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.finalizeStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.finalizeStream( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeStream with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.finalizeStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.finalizeStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeStream with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + ); + request.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', + ['stream', 'name'] + ); + request.stream.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.finalizeStream(request), expectedError); + }); + }); + + describe('splitReadStream', () => { + it('invokes splitReadStream without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); + const [response] = await client.splitReadStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream without error using callback', async () => { + const client = new 
bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + ); + client.innerApiCalls.splitReadStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.splitReadStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream ??= {}; + const 
defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.splitReadStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.splitReadStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.splitReadStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes splitReadStream with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + ); + request.originalStream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', + ['originalStream', 'name'] + ); + request.originalStream.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.splitReadStream(request), expectedError); + }); + }); + + describe('readRows', () => { + it('invokes readRows without error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() + ); + client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes readRows with error', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = 
defaultValue1; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.readRows = stubServerStreamingCall( + undefined, + expectedError + ); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes readRows with closed client', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await 
assert.rejects(promise, expectedError); + }); + }); + + describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + 
.getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('stream', () => { + const fakePath = '/rendered/path/stream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + stream: 'streamValue', + }; + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.streamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.streamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('streamPath', () => { + const result = client.streamPath( + 'projectValue', + 'locationValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.streamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromStreamName', () => { + const result = client.matchProjectFromStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + 
(client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromStreamName', () => { + const result = client.matchLocationFromStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromStreamName', () => { + const result = client.matchStreamFromStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.streamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts new file mode 100644 index 00000000000..830e18a06bb --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as bigquerywriteModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubBidiStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + return sinon.stub().returns(mockStream); +} + +describe('v1.BigQueryWriteClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + bigquerywriteModule.v1.BigQueryWriteClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = + bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = bigquerywriteModule.v1.BigQueryWriteClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.bigQueryWriteStub, undefined); + await client.initialize(); + assert(client.bigQueryWriteStub); + }); + + it('has close method for the initialized client', done => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.bigQueryWriteStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
assert.strictEqual(client.bigQueryWriteStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createWriteStream', () => { + it('invokes createWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + 
client.innerApiCalls.createWriteStream = stubSimpleCall(expectedResponse); + const [response] = await client.createWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.createWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as 
SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.createWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.createWriteStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.createWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes createWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await 
assert.rejects(client.createWriteStream(request), expectedError); + }); + }); + + describe('getWriteStream', () => { + it('invokes getWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.getWriteStream = stubSimpleCall(expectedResponse); + const [response] = await client.getWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes getWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = 
generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.WriteStream() + ); + client.innerApiCalls.getWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes getWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.getWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.getWriteStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.getWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.getWriteStream as SinonStub + 
).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes getWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getWriteStream(request), expectedError); + }); + }); + + describe('finalizeWriteStream', () => { + it('invokes finalizeWriteStream without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + ); + client.innerApiCalls.finalizeWriteStream = + stubSimpleCall(expectedResponse); + const [response] = await client.finalizeWriteStream(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + 
client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeWriteStream without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + ); + client.innerApiCalls.finalizeWriteStream = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.finalizeWriteStream( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeWriteStream with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, 
+ projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.finalizeWriteStream = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(client.finalizeWriteStream(request), expectedError); + const actualRequest = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.finalizeWriteStream as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes finalizeWriteStream with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', + ['name'] + ); + request.name = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.finalizeWriteStream(request), expectedError); + }); + }); + + describe('batchCommitWriteStreams', () => { + it('invokes batchCommitWriteStreams without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + ); + client.innerApiCalls.batchCommitWriteStreams = + stubSimpleCall(expectedResponse); + const [response] = await client.batchCommitWriteStreams(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCommitWriteStreams without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + ); + client.innerApiCalls.batchCommitWriteStreams = + stubSimpleCallWithCallback(expectedResponse); + const promise = 
new Promise((resolve, reject) => { + client.batchCommitWriteStreams( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCommitWriteStreams with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchCommitWriteStreams(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCommitWriteStreams as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCommitWriteStreams with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchCommitWriteStreams(request), + expectedError + ); + }); + }); + + describe('flushRows', () => { + it('invokes flushRows without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + ); + client.innerApiCalls.flushRows = stubSimpleCall(expectedResponse); + const [response] = await client.flushRows(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + 
).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes flushRows without error using callback', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + ); + client.innerApiCalls.flushRows = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.flushRows( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes flushRows with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.flushRows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.flushRows(request), expectedError); + const actualRequest = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.flushRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes flushRows with closed client', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.FlushRowsRequest', + ['writeStream'] + ); + request.writeStream = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.flushRows(request), expectedError); + }); + }); + + describe('appendRows', () => { + it('invokes appendRows without error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + ); + + const expectedResponse = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1.AppendRowsResponse() + ); + client.innerApiCalls.appendRows = stubBidiStreamingCall(expectedResponse); + const stream = client.appendRows(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.appendRows as SinonStub) + .getCall(0) + .calledWith(null) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + + it('invokes appendRows with error', async () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + ); + const expectedError = new Error('expected'); + client.innerApiCalls.appendRows = stubBidiStreamingCall( + undefined, + expectedError + ); + const stream = client.appendRows(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + await assert.rejects(promise, expectedError); + assert( + (client.innerApiCalls.appendRows as SinonStub) + .getCall(0) + .calledWith(null) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + }); + + describe('Path templates', () => { + describe('project', () => { + const fakePath = 
'/rendered/path/project'; + const expectedParameters = { + project: 'projectValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readSession', () => { + const fakePath = '/rendered/path/readSession'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readSessionPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readSessionPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readSessionPath', () => { + const result = client.readSessionPath( + 'projectValue', + 'locationValue', + 'sessionValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readSessionPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadSessionName', () => { + const result = 
client.matchProjectFromReadSessionName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadSessionName', () => { + const result = client.matchLocationFromReadSessionName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadSessionName', () => { + const result = client.matchSessionFromReadSessionName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readSessionPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('readStream', () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + 
.calledWith(fakePath) + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('table', () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + 
.calledWith(fakePath) + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('writeStream', () => { + const fakePath = '/rendered/path/writeStream'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + stream: 'streamValue', + }; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.writeStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.writeStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('writeStreamPath', () => { + const result = client.writeStreamPath( + 'projectValue', + 'datasetValue', + 'tableValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromWriteStreamName', () => { + const result = client.matchProjectFromWriteStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromWriteStreamName', () => { + const result = client.matchDatasetFromWriteStreamName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + 
(client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromWriteStreamName', () => { + const result = client.matchTableFromWriteStreamName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromWriteStreamName', () => { + const result = client.matchStreamFromWriteStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json new file mode 100644 index 00000000000..c78f1c884ef --- /dev/null +++ b/handwritten/bigquery-storage/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/handwritten/bigquery-storage/webpack.config.js b/handwritten/bigquery-storage/webpack.config.js new file mode 100644 index 00000000000..de163617408 --- /dev/null +++ b/handwritten/bigquery-storage/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'BigQueryRead', + filename: './big-query-read.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/, + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader', + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader', + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader', + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader', + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader', + }, + ], + }, + mode: 'production', +}; From 72e5d86357088f42728ef00ba5e88a6ccb72fb4d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 22:48:33 +0000 Subject: [PATCH 204/333] build: have Kokoro grab service account credentials from secret that will be rotated for system tests (#315) Source-Link: https://togithub.com/googleapis/synthtool/commit/abbc97db69a57dcb991ba97ef503305b701ffb3a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:fe04ae044dadf5ad88d979dbcc85e0e99372fb5d6316790341e6aca5e4e3fbc8 --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 3 +-- .../.kokoro/continuous/node12/samples-test.cfg | 5 +++++ .../.kokoro/continuous/node12/system-test.cfg | 5 +++++ .../.kokoro/presubmit/node12/samples-test.cfg | 5 +++++ .../.kokoro/presubmit/node12/system-test.cfg | 5 +++++ handwritten/bigquery-storage/.kokoro/samples-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/system-test.sh | 2 +- 7 files changed, 23 insertions(+), 4 
deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index e97989708da..788f7a9fdff 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:5b05f26103855c3a15433141389c478d1d3fe088fb5d4e3217c4793f6b3f245e -# created: 2022-11-04 + digest: sha256:fe04ae044dadf5ad88d979dbcc85e0e99372fb5d6316790341e6aca5e4e3fbc8 diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg index 68b02101fc1..9ca77598360 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg @@ -5,3 +5,8 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" } + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg index 3ccb29d69f8..42454cf416c 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg @@ -5,3 +5,8 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" } + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg index 
68b02101fc1..9ca77598360 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg @@ -5,3 +5,8 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" } + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg index 3ccb29d69f8..42454cf416c 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg @@ -5,3 +5,8 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" } + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index fbc058a4ec4..806c0082236 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -19,7 +19,7 @@ set -eo pipefail export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account export GCLOUD_PROJECT=long-door-651 cd $(dirname $0)/.. 
diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 87fa0653d76..0201e9dfd71 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -19,7 +19,7 @@ set -eo pipefail export NPM_CONFIG_PREFIX=${HOME}/.npm-global # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account export GCLOUD_PROJECT=long-door-651 cd $(dirname $0)/.. From d9b14b620ceb0e77912c0d10fd5ba44f18a9642f Mon Sep 17 00:00:00 2001 From: Lo Ferris <50979514+loferris@users.noreply.github.com> Date: Thu, 22 Dec 2022 10:23:44 -0800 Subject: [PATCH 205/333] fix: manual update from nodejs postprocessor (#316) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * manual update from nodejs postprocessor * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/storage.proto | 69 +++++++-- .../cloud/bigquery/storage/v1/stream.proto | 48 +++--- .../cloud/bigquery/storage/v1/table.proto | 3 +- .../bigquery-storage/protos/protos.d.ts | 19 +++ handwritten/bigquery-storage/protos/protos.js | 139 +++++++++++++++++- .../bigquery-storage/protos/protos.json | 19 +++ .../v1/big_query_write.append_rows.js | 25 +++- ..._query_write.batch_commit_write_streams.js | 4 +- ...data.google.cloud.bigquery.storage.v1.json | 6 +- .../src/v1/big_query_write_client.ts | 4 +- 10 files changed, 291 insertions(+), 45 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index df602135b9d..85daf6dfa22 100644 --- 
a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -73,7 +73,8 @@ service BigQueryRead { post: "/v1/{read_session.table=projects/*/datasets/*/tables/*}" body: "*" }; - option (google.api.method_signature) = "parent,read_session,max_stream_count"; + option (google.api.method_signature) = + "parent,read_session,max_stream_count"; } // Reads rows from the stream in the format prescribed by the ReadSession. @@ -102,7 +103,8 @@ service BigQueryRead { // original, primary, and residual, that original[0-j] = primary[0-j] and // original[j-n] = residual[0-m] once the streams have been read to // completion. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + rpc SplitReadStream(SplitReadStreamRequest) + returns (SplitReadStreamResponse) { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/sessions/*/streams/*}" }; @@ -186,7 +188,8 @@ service BigQueryWrite { // Finalize a write stream so that no new data can be appended to the // stream. Finalize is not supported on the '_default' stream. - rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) { + rpc FinalizeWriteStream(FinalizeWriteStreamRequest) + returns (FinalizeWriteStreamResponse) { option (google.api.http) = { post: "/v1/{name=projects/*/datasets/*/tables/*/streams/*}" body: "*" @@ -200,7 +203,8 @@ service BigQueryWrite { // Streams must be finalized before commit and cannot be committed multiple // times. Once a stream is committed, data in the stream becomes available // for read operations. 
- rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) { + rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) + returns (BatchCommitWriteStreamsResponse) { option (google.api.http) = { get: "/v1/{parent=projects/*/datasets/*/tables/*}" }; @@ -384,9 +388,7 @@ message CreateWriteStreamRequest { // of `projects/{project}/datasets/{dataset}/tables/{table}`. string parent = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } ]; // Required. Stream to be created. @@ -417,10 +419,27 @@ message AppendRowsRequest { ProtoRows rows = 2; } - // Required. The write_stream identifies the target of the append operation, and only - // needs to be specified as part of the first request on the gRPC connection. - // If provided for subsequent requests, it must match the value of the first - // request. + // An enum to indicate how to interpret missing values. Missing values are + // fields present in user schema but missing in rows. A missing value can + // represent a NULL or a column default value defined in BigQuery table + // schema. + enum MissingValueInterpretation { + // Invalid missing value interpretation. Requests with this value will be + // rejected. + MISSING_VALUE_INTERPRETATION_UNSPECIFIED = 0; + + // Missing value is interpreted as NULL. + NULL_VALUE = 1; + + // Missing value is interpreted as column default value if declared in the + // table schema, NULL otherwise. + DEFAULT_VALUE = 2; + } + + // Required. The write_stream identifies the target of the append operation, + // and only needs to be specified as part of the first request on the gRPC + // connection. If provided for subsequent requests, it must match the value of + // the first request. 
// // For explicitly created write streams, the format is: // @@ -454,6 +473,26 @@ message AppendRowsRequest { // Id set by client to annotate its identity. Only initial request setting is // respected. string trace_id = 6; + + // A map to indicate how to interpret missing value for some fields. Missing + // values are fields present in user schema but missing in rows. The key is + // the field name. The value is the interpretation of missing values for the + // field. + // + // For example, a map {'foo': NULL_VALUE, 'bar': DEFAULT_VALUE} means all + // missing values in field foo are interpreted as NULL, all missing values in + // field bar are interpreted as the default value of field bar in table + // schema. + // + // If a field is not in this map and has missing values, the missing values + // in this field are interpreted as NULL. + // + // This field only applies to the current request, it won't affect other + // requests on the connection. + // + // Currently, field name can only be top-level column name, can't be a struct + // field path like 'foo.bar'. + map missing_value_interpretations = 7; } // Response message for `AppendRows`. @@ -525,13 +564,11 @@ message GetWriteStreamRequest { // Request message for `BatchCommitWriteStreams`. message BatchCommitWriteStreamsRequest { - // Required. Parent table that all the streams should belong to, in the form of - // `projects/{project}/datasets/{dataset}/tables/{table}`. + // Required. Parent table that all the streams should belong to, in the form + // of `projects/{project}/datasets/{dataset}/tables/{table}`. string parent = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } ]; // Required. The group of streams that will be committed atomically. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index fe71adfa6b7..ec137de19dd 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -122,10 +122,12 @@ message ReadSession { oneof output_format_serialization_options { // Optional. Options specific to the Apache Arrow output format. - ArrowSerializationOptions arrow_serialization_options = 3 [(google.api.field_behavior) = OPTIONAL]; + ArrowSerializationOptions arrow_serialization_options = 3 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Options specific to the Apache Avro output format - AvroSerializationOptions avro_serialization_options = 4 [(google.api.field_behavior) = OPTIONAL]; + AvroSerializationOptions avro_serialization_options = 4 + [(google.api.field_behavior) = OPTIONAL]; } } @@ -133,12 +135,15 @@ message ReadSession { // `projects/{project_id}/locations/{location}/sessions/{session_id}`. string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Time at which the session becomes invalid. After this time, subsequent - // requests to read this Session will return errors. The expire_time is - // automatically assigned and currently cannot be specified or updated. - google.protobuf.Timestamp expire_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Time at which the session becomes invalid. After this time, + // subsequent requests to read this Session will return errors. The + // expire_time is automatically assigned and currently cannot be specified or + // updated. + google.protobuf.Timestamp expire_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; - // Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported. + // Immutable. Data format of the output data. 
DATA_FORMAT_UNSPECIFIED not + // supported. DataFormat data_format = 3 [(google.api.field_behavior) = IMMUTABLE]; // The schema for the read. If read_options.selected_fields is set, the @@ -156,12 +161,11 @@ message ReadSession { // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}` string table = 6 [ (google.api.field_behavior) = IMMUTABLE, - (google.api.resource_reference) = { - type: "bigquery.googleapis.com/Table" - } + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } ]; - // Optional. Any modifiers which are applied when reading from the specified table. + // Optional. Any modifiers which are applied when reading from the specified + // table. TableModifiers table_modifiers = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. Read options for this session (e.g. column selection, filters). @@ -178,10 +182,16 @@ message ReadSession { // Output only. An estimate on the number of bytes this session will scan when // all streams are completely consumed. This estimate is based on // metadata from the table which might be incomplete or stale. - int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + int64 estimated_total_bytes_scanned = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An estimate on the number of rows present in this session's + // streams. This estimate is based on metadata from the table which might be + // incomplete or stale. + int64 estimated_row_count = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. ID set by client to annotate a session identity. This does not need - // to be strictly unique, but instead the same ID should be used to group + // Optional. ID set by client to annotate a session identity. This does not + // need to be strictly unique, but instead the same ID should be used to group // logically connected sessions (e.g. All using the same ID for all sessions // needed to complete a Spark SQL query is reasonable). 
// @@ -260,15 +270,17 @@ message WriteStream { // Immutable. Type of the stream. Type type = 2 [(google.api.field_behavior) = IMMUTABLE]; - // Output only. Create time of the stream. For the _default stream, this is the - // creation_time of the table. - google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Create time of the stream. For the _default stream, this is + // the creation_time of the table. + google.protobuf.Timestamp create_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Commit time of the stream. // If a stream is of `COMMITTED` type, then it will have a commit_time same as // `create_time`. If the stream is of `PENDING` type, empty commit_time // means it is not committed. - google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp commit_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The schema of the destination table. It is only returned in // `CreateWriteStream` response. Caller should generate data that's diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index fa4f840c580..57e79334247 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -107,7 +107,8 @@ message TableFieldSchema { // Optional. The field mode. The default value is NULLABLE. Mode mode = 3 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Describes the nested schema fields if the type property is set to STRUCT. + // Optional. Describes the nested schema fields if the type property is set to + // STRUCT. repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. The field description. The maximum length is 1,024 characters. 
diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 7d4eae00205..4e868e7696e 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -2044,6 +2044,9 @@ export namespace google { /** AppendRowsRequest traceId */ traceId?: (string|null); + + /** AppendRowsRequest missingValueInterpretations */ + missingValueInterpretations?: ({ [k: string]: google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation }|null); } /** Represents an AppendRowsRequest. */ @@ -2067,6 +2070,9 @@ export namespace google { /** AppendRowsRequest traceId. */ public traceId: string; + /** AppendRowsRequest missingValueInterpretations. */ + public missingValueInterpretations: { [k: string]: google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation }; + /** AppendRowsRequest rows. */ public rows?: "protoRows"; @@ -2252,6 +2258,13 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + /** MissingValueInterpretation enum. */ + enum MissingValueInterpretation { + MISSING_VALUE_INTERPRETATION_UNSPECIFIED = 0, + NULL_VALUE = 1, + DEFAULT_VALUE = 2 + } } /** Properties of an AppendRowsResponse. */ @@ -3465,6 +3478,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned */ estimatedTotalBytesScanned?: (number|Long|string|null); + /** ReadSession estimatedRowCount */ + estimatedRowCount?: (number|Long|string|null); + /** ReadSession traceId */ traceId?: (string|null); } @@ -3508,6 +3524,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned. */ public estimatedTotalBytesScanned: (number|Long|string); + /** ReadSession estimatedRowCount. */ + public estimatedRowCount: (number|Long|string); + /** ReadSession traceId. 
*/ public traceId: string; diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 53da7298f1a..5cd70494b1c 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -4532,6 +4532,7 @@ * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows * @property {string|null} [traceId] AppendRowsRequest traceId + * @property {Object.|null} [missingValueInterpretations] AppendRowsRequest missingValueInterpretations */ /** @@ -4543,6 +4544,7 @@ * @param {google.cloud.bigquery.storage.v1.IAppendRowsRequest=} [properties] Properties to set */ function AppendRowsRequest(properties) { + this.missingValueInterpretations = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -4581,6 +4583,14 @@ */ AppendRowsRequest.prototype.traceId = ""; + /** + * AppendRowsRequest missingValueInterpretations. 
+ * @member {Object.} missingValueInterpretations + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.missingValueInterpretations = $util.emptyObject; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -4627,6 +4637,9 @@ $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId); + if (message.missingValueInterpretations != null && Object.hasOwnProperty.call(message, "missingValueInterpretations")) + for (var keys = Object.keys(message.missingValueInterpretations), i = 0; i < keys.length; ++i) + writer.uint32(/* id 7, wireType 2 =*/58).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 0 =*/16).int32(message.missingValueInterpretations[keys[i]]).ldelim(); return writer; }; @@ -4657,7 +4670,7 @@ AppendRowsRequest.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(), key, value; while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -4677,6 +4690,29 @@ message.traceId = reader.string(); break; } + case 7: { + if (message.missingValueInterpretations === $util.emptyObject) + message.missingValueInterpretations = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = 0; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.int32(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.missingValueInterpretations[key] = value; + break; + } default: reader.skipType(tag & 7); break; @@ -4732,6 +4768,20 @@ if (message.traceId != null && message.hasOwnProperty("traceId")) if (!$util.isString(message.traceId)) return "traceId: string expected"; + if (message.missingValueInterpretations != null && message.hasOwnProperty("missingValueInterpretations")) { + if (!$util.isObject(message.missingValueInterpretations)) + return "missingValueInterpretations: object expected"; + var key = Object.keys(message.missingValueInterpretations); + for (var i = 0; i < key.length; ++i) + switch (message.missingValueInterpretations[key[i]]) { + default: + return "missingValueInterpretations: enum value{k:string} expected"; + case 0: + case 1: + case 2: + break; + } + } return null; }; @@ -4761,6 +4811,32 @@ } if (object.traceId != null) message.traceId = String(object.traceId); + if (object.missingValueInterpretations) { + if (typeof object.missingValueInterpretations !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.missingValueInterpretations: object expected"); + message.missingValueInterpretations = {}; + for (var keys = Object.keys(object.missingValueInterpretations), i = 0; i < keys.length; ++i) + switch (object.missingValueInterpretations[keys[i]]) { + 
default: + if (typeof object.missingValueInterpretations[keys[i]] === "number") { + message.missingValueInterpretations[keys[i]] = object.missingValueInterpretations[keys[i]]; + break; + } + break; + case "MISSING_VALUE_INTERPRETATION_UNSPECIFIED": + case 0: + message.missingValueInterpretations[keys[i]] = 0; + break; + case "NULL_VALUE": + case 1: + message.missingValueInterpretations[keys[i]] = 1; + break; + case "DEFAULT_VALUE": + case 2: + message.missingValueInterpretations[keys[i]] = 2; + break; + } + } return message; }; @@ -4777,6 +4853,8 @@ if (!options) options = {}; var object = {}; + if (options.objects || options.defaults) + object.missingValueInterpretations = {}; if (options.defaults) { object.writeStream = ""; object.offset = null; @@ -4793,6 +4871,12 @@ } if (message.traceId != null && message.hasOwnProperty("traceId")) object.traceId = message.traceId; + var keys2; + if (message.missingValueInterpretations && (keys2 = Object.keys(message.missingValueInterpretations)).length) { + object.missingValueInterpretations = {}; + for (var j = 0; j < keys2.length; ++j) + object.missingValueInterpretations[keys2[j]] = options.enums === String ? $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.missingValueInterpretations[keys2[j]]] === undefined ? message.missingValueInterpretations[keys2[j]] : $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.missingValueInterpretations[keys2[j]]] : message.missingValueInterpretations[keys2[j]]; + } return object; }; @@ -5059,6 +5143,22 @@ return ProtoData; })(); + /** + * MissingValueInterpretation enum. 
+ * @name google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation + * @enum {number} + * @property {number} MISSING_VALUE_INTERPRETATION_UNSPECIFIED=0 MISSING_VALUE_INTERPRETATION_UNSPECIFIED value + * @property {number} NULL_VALUE=1 NULL_VALUE value + * @property {number} DEFAULT_VALUE=2 DEFAULT_VALUE value + */ + AppendRowsRequest.MissingValueInterpretation = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MISSING_VALUE_INTERPRETATION_UNSPECIFIED"] = 0; + values[valuesById[1] = "NULL_VALUE"] = 1; + values[valuesById[2] = "DEFAULT_VALUE"] = 2; + return values; + })(); + return AppendRowsRequest; })(); @@ -7914,6 +8014,7 @@ * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions * @property {Array.|null} [streams] ReadSession streams * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned + * @property {number|Long|null} [estimatedRowCount] ReadSession estimatedRowCount * @property {string|null} [traceId] ReadSession traceId */ @@ -8013,6 +8114,14 @@ */ ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + /** + * ReadSession estimatedRowCount. + * @member {number|Long} estimatedRowCount + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.estimatedRowCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + /** * ReadSession traceId. 
* @member {string} traceId @@ -8082,6 +8191,8 @@ writer.uint32(/* id 12, wireType 0 =*/96).int64(message.estimatedTotalBytesScanned); if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) writer.uint32(/* id 13, wireType 2 =*/106).string(message.traceId); + if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) + writer.uint32(/* id 14, wireType 0 =*/112).int64(message.estimatedRowCount); return writer; }; @@ -8158,6 +8269,10 @@ message.estimatedTotalBytesScanned = reader.int64(); break; } + case 14: { + message.estimatedRowCount = reader.int64(); + break; + } case 13: { message.traceId = reader.string(); break; @@ -8258,6 +8373,9 @@ if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) return "estimatedTotalBytesScanned: integer|Long expected"; + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) + return "estimatedRowCount: integer|Long expected"; if (message.traceId != null && message.hasOwnProperty("traceId")) if (!$util.isString(message.traceId)) return "traceId: string expected"; @@ -8344,6 +8462,15 @@ message.estimatedTotalBytesScanned = object.estimatedTotalBytesScanned; else if (typeof object.estimatedTotalBytesScanned === "object") message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); + if (object.estimatedRowCount != null) + if ($util.Long) + (message.estimatedRowCount = 
$util.Long.fromValue(object.estimatedRowCount)).unsigned = false; + else if (typeof object.estimatedRowCount === "string") + message.estimatedRowCount = parseInt(object.estimatedRowCount, 10); + else if (typeof object.estimatedRowCount === "number") + message.estimatedRowCount = object.estimatedRowCount; + else if (typeof object.estimatedRowCount === "object") + message.estimatedRowCount = new $util.LongBits(object.estimatedRowCount.low >>> 0, object.estimatedRowCount.high >>> 0).toNumber(); if (object.traceId != null) message.traceId = String(object.traceId); return message; @@ -8377,6 +8504,11 @@ } else object.estimatedTotalBytesScanned = options.longs === String ? "0" : 0; object.traceId = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedRowCount = options.longs === String ? "0" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -8412,6 +8544,11 @@ object.estimatedTotalBytesScanned = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalBytesScanned) : options.longs === Number ? new $util.LongBits(message.estimatedTotalBytesScanned.low >>> 0, message.estimatedTotalBytesScanned.high >>> 0).toNumber() : message.estimatedTotalBytesScanned; if (message.traceId != null && message.hasOwnProperty("traceId")) object.traceId = message.traceId; + if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) + if (typeof message.estimatedRowCount === "number") + object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; + else + object.estimatedRowCount = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? 
new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index e8388095ced..7b61acc3084 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -510,6 +510,11 @@ "traceId": { "type": "string", "id": 6 + }, + "missingValueInterpretations": { + "keyType": "string", + "type": "MissingValueInterpretation", + "id": 7 } }, "nested": { @@ -524,6 +529,13 @@ "id": 2 } } + }, + "MissingValueInterpretation": { + "values": { + "MISSING_VALUE_INTERPRETATION_UNSPECIFIED": 0, + "NULL_VALUE": 1, + "DEFAULT_VALUE": 2 + } } } }, @@ -812,6 +824,13 @@ "(google.api.field_behavior)": "OUTPUT_ONLY" } }, + "estimatedRowCount": { + "type": "int64", + "id": 14, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, "traceId": { "type": "string", "id": 13, diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 24764fa1010..1db34235ed4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -29,10 +29,10 @@ function main(writeStream) { * TODO(developer): Uncomment these variables before running the sample. */ /** - * Required. The write_stream identifies the target of the append operation, and only - * needs to be specified as part of the first request on the gRPC connection. - * If provided for subsequent requests, it must match the value of the first - * request. + * Required. The write_stream identifies the target of the append operation, + * and only needs to be specified as part of the first request on the gRPC + * connection. 
If provided for subsequent requests, it must match the value of + * the first request. * For explicitly created write streams, the format is: * * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` * For the special default stream, the format is: @@ -55,6 +55,23 @@ function main(writeStream) { * respected. */ // const traceId = 'abc123' + /** + * A map to indicate how to interpret missing value for some fields. Missing + * values are fields present in user schema but missing in rows. The key is + * the field name. The value is the interpretation of missing values for the + * field. + * For example, a map {'foo': NULL_VALUE, 'bar': DEFAULT_VALUE} means all + * missing values in field foo are interpreted as NULL, all missing values in + * field bar are interpreted as the default value of field bar in table + * schema. + * If a field is not in this map and has missing values, the missing values + * in this field are interpreted as NULL. + * This field only applies to the current request, it won't affect other + * requests on the connection. + * Currently, field name can only be top-level column name, can't be a struct + * field path like 'foo.bar'. + */ + // const missingValueInterpretations = 1234 // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 90cdefc4f94..5ca17b86b80 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -29,8 +29,8 @@ function main(parent, writeStreams) { * TODO(developer): Uncomment these variables before running the sample. */ /** - * Required. 
Parent table that all the streams should belong to, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}`. + * Required. Parent table that all the streams should belong to, in the form + * of `projects/{project}/datasets/{dataset}/tables/{table}`. */ // const parent = 'abc123' /** diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 94f815441aa..5aba60cad3c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -206,7 +206,7 @@ "segments": [ { "start": 25, - "end": 80, + "end": 97, "type": "FULL" } ], @@ -230,6 +230,10 @@ { "name": "trace_id", "type": "TYPE_STRING" + }, + { + "name": "missing_value_interpretations", + "type": "TYPE_MESSAGE[]" } ], "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse", diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 6e002a67e7c..13955be25fd 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -675,8 +675,8 @@ export class BigQueryWriteClient { * @param {Object} request * The request object that will be sent. * @param {string} request.parent - * Required. Parent table that all the streams should belong to, in the form of - * `projects/{project}/datasets/{dataset}/tables/{table}`. + * Required. Parent table that all the streams should belong to, in the form + * of `projects/{project}/datasets/{dataset}/tables/{table}`. * @param {string[]} request.writeStreams * Required. The group of streams that will be committed atomically. 
* @param {object} [options] From 0c76104b176d02de6490b59be5f74ce4c815de31 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Jan 2023 20:26:08 +0000 Subject: [PATCH 206/333] chore(deps): update dependency sinon to v15 (#311) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2d6e2462757..2bd43773cf2 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -42,7 +42,7 @@ "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", + "sinon": "^15.0.0", "ts-loader": "^9.0.0", "typescript": "^4.8.3", "webpack": "^5.0.0", From 07884908aa780622ba393c880a5e6de100ad48b6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 11 Jan 2023 19:30:31 +0000 Subject: [PATCH 207/333] chore(deps): update dependency webpack-cli to v5 (#312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update dependency webpack-cli to v5 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- handwritten/bigquery-storage/package.json | 2 +- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 2bd43773cf2..56d2f8e9d1f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -46,7 +46,7 @@ "ts-loader": "^9.0.0", "typescript": "^4.8.3", "webpack": "^5.0.0", - "webpack-cli": "^4.0.0" + "webpack-cli": "^5.0.0" }, "engines": { "node": ">=12.0.0" diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts 
index 4e868e7696e..399d28bdd8e 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 5cd70494b1c..730966bc2c4 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. From efed7308ced6ee66d655c89198fd3528773ca805 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 13:44:25 -0800 Subject: [PATCH 208/333] chore(main): release 3.2.1 (#317) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 3.2.1 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index eeb46ea09fd..878c5a4f8c9 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 
[3.2.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.2.0...v3.2.1) (2023-01-11) + + +### Bug Fixes + +* Manual update from nodejs postprocessor ([#316](https://github.com/googleapis/nodejs-bigquery-storage/issues/316)) ([3861cf6](https://github.com/googleapis/nodejs-bigquery-storage/commit/3861cf625aa01511eb6bdcf99e372dbd5bec6fa9)) + ## [3.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.1.1...v3.2.0) (2022-11-11) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 56d2f8e9d1f..0e58f3b4058 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.2.0", + "version": "3.2.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 5aba60cad3c..d2d33130dc4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.2.0", + "version": "3.2.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 0f8c9365858..b40825ce896 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.2.0", + "version": "3.2.1", "language": "TYPESCRIPT", "apis": [ { From 1c538357b288d5329cba90ce0440f73b6419503e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 13:54:56 -0800 Subject: [PATCH 209/333] feat: Added SuggestConversationSummary RPC (#318) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Added Snooze API support PiperOrigin-RevId: 500543032 Source-Link: https://github.com/googleapis/googleapis/commit/d4864bf1425882fddb80ffb627c385ec22d1fd00 Source-Link: https://github.com/googleapis/googleapis-gen/commit/245031557f8852e8e089a6511f63fc226703fef9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjQ1MDMxNTU3Zjg4NTJlOGUwODlhNjUxMWY2M2ZjMjI2NzAzZmVmOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Added SuggestConversationSummary RPC docs: updated go library package PiperOrigin-RevId: 501862436 Source-Link: https://github.com/googleapis/googleapis/commit/155e0f4123ba003055587768944a47498c48926b Source-Link: https://github.com/googleapis/googleapis-gen/commit/3051f617a991c274c88d27064e803095e4ef9d39 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzA1MWY2MTdhOTkxYzI3NGM4OGQyNzA2NGU4MDMwOTVlNGVmOWQzOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- handwritten/bigquery-storage/.jsdoc.js | 4 ++-- .../generated/v1/big_query_read.create_read_session.js | 2 +- .../samples/generated/v1/big_query_read.read_rows.js | 2 +- 
.../samples/generated/v1/big_query_read.split_read_stream.js | 2 +- .../samples/generated/v1/big_query_write.append_rows.js | 2 +- .../v1/big_query_write.batch_commit_write_streams.js | 2 +- .../generated/v1/big_query_write.create_write_stream.js | 2 +- .../generated/v1/big_query_write.finalize_write_stream.js | 2 +- .../samples/generated/v1/big_query_write.flush_rows.js | 2 +- .../samples/generated/v1/big_query_write.get_write_stream.js | 2 +- .../big_query_storage.batch_create_read_session_streams.js | 2 +- .../v1beta1/big_query_storage.create_read_session.js | 2 +- .../generated/v1beta1/big_query_storage.finalize_stream.js | 2 +- .../samples/generated/v1beta1/big_query_storage.read_rows.js | 2 +- .../generated/v1beta1/big_query_storage.split_read_stream.js | 2 +- handwritten/bigquery-storage/src/v1/big_query_read_client.ts | 2 +- handwritten/bigquery-storage/src/v1/big_query_write_client.ts | 2 +- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../bigquery-storage/src/v1beta1/big_query_storage_client.ts | 2 +- handwritten/bigquery-storage/src/v1beta1/index.ts | 2 +- .../bigquery-storage/system-test/fixtures/sample/src/index.js | 2 +- .../bigquery-storage/system-test/fixtures/sample/src/index.ts | 2 +- handwritten/bigquery-storage/system-test/install.ts | 2 +- handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts | 2 +- .../bigquery-storage/test/gapic_big_query_storage_v1beta1.ts | 2 +- handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts | 2 +- 26 files changed, 27 insertions(+), 27 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index 21870f2a75f..dcf0053ac8d 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2022 Google LLC', + copyright: 'Copyright 2023 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index 6cbaf38dcdf..829e5ce9cc3 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index 30bea092495..a38ba5618fe 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index a7d2a761348..e3522593f09 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 1db34235ed4..c121363cad9 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 5ca17b86b80..78ce9fa71fb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index e67550626ab..2eeb3f4a1e7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index 85faa908822..f9766ec5286 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index ae6e56be382..c9dd16b7f9a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 79b10927e9b..83e7385822a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index dd1d96f46b4..015a92acd58 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index 3bf8d5ab0df..b7ee90090d5 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index 274b957673b..41a6abd1926 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index 27981cfa6a3..2c95aad2222 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index 79c048a8427..dfaef414098 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 1a78d38a741..54c280fd703 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 13955be25fd..2743d37054b 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index f3bacd94214..3adaec0e2a4 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 19120696d61..29b8e225100 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index dc3afed8ea7..fddf65e1444 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index d59c13c62cd..c0874e08c69 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 6fd6e3ca7ee..330b1557866 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 6dd1eaadafa..f61fe236476 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index cd2a50bb2c1..e41d046a36b 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 3623068f487..2aa76e8ec7e 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index 830e18a06bb..e84e33a6fbd 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
From 50610c2b52b546b2affb284d9e98d6d7cf7cbf94 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 27 Jan 2023 11:57:34 -0800 Subject: [PATCH 210/333] chore(main): release 3.3.0 (#319) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 3.3.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 878c5a4f8c9..5ee19526a33 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [3.3.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.2.1...v3.3.0) (2023-01-25) + + +### Features + +* Added SuggestConversationSummary RPC ([#318](https://github.com/googleapis/nodejs-bigquery-storage/issues/318)) ([60d2ed1](https://github.com/googleapis/nodejs-bigquery-storage/commit/60d2ed1bed62611e342e41a45f831307704a5834)) + ## [3.2.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.2.0...v3.2.1) (2023-01-11) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 0e58f3b4058..9b3ca7f71ed 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.2.1", + "version": "3.3.0", "description": "Client 
for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index d2d33130dc4..2fe9c974354 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.2.1", + "version": "3.3.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index b40825ce896..5eccd319c53 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.2.1", + "version": "3.3.0", "language": "TYPESCRIPT", "apis": [ { From 13729e150533554b49631fb0b219dc63c70b14bf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Feb 2023 11:48:19 -0800 Subject: [PATCH 211/333] chore: update import paths for Go targets to match open source location (#320) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update import paths for Go targets to match open source location chore: update go_package in protos to match open source location chore: add explicit release levels to Go gapic targets 
PiperOrigin-RevId: 506711567 Source-Link: https://github.com/googleapis/googleapis/commit/d02e58244db5d01607ec2ad52a47e7edce8612f0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7f1c54153125eb5abd60a32de58cfda6a798a70a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2YxYzU0MTUzMTI1ZWI1YWJkNjBhMzJkZTU4Y2ZkYTZhNzk4YTcwYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../protos/google/cloud/bigquery/storage/v1/annotations.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/arrow.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/avro.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/protobuf.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/storage.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/stream.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/table.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1beta1/arrow.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1beta1/avro.proto | 2 +- .../google/cloud/bigquery/storage/v1beta1/read_options.proto | 2 +- .../google/cloud/bigquery/storage/v1beta1/storage.proto | 2 +- .../cloud/bigquery/storage/v1beta1/table_reference.proto | 2 +- handwritten/bigquery-storage/protos/protos.json | 4 ++-- 13 files changed, 14 insertions(+), 14 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto index 1627fd12a0c..5ea5016dfd4 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/annotations.proto @@ -5,7 +5,7 @@ package google.cloud.bigquery.storage.v1; import "google/protobuf/descriptor.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option 
go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_package = "com.google.cloud.bigquery.storage.v1"; option java_multiple_files = true; option java_outer_classname = "AnnotationsProto"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 6d3f6080bf6..4ac268c8b02 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "ArrowProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index e1ecb667b61..52441e9fcf9 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "AvroProto"; option java_package = 
"com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto index b3754acf7b3..05ac778f03d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -19,7 +19,7 @@ package google.cloud.bigquery.storage.v1; import "google/protobuf/descriptor.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "ProtoBufProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 85daf6dfa22..e84a58df778 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -30,7 +30,7 @@ import "google/protobuf/wrappers.proto"; import "google/rpc/status.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "StorageProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 
ec137de19dd..85f6dd82575 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -24,7 +24,7 @@ import "google/cloud/bigquery/storage/v1/table.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "StreamProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 57e79334247..56f7852a194 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -19,7 +19,7 @@ package google.cloud.bigquery.storage.v1; import "google/api/field_behavior.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; option java_multiple_files = true; option java_outer_classname = "TableProto"; option java_package = "com.google.cloud.bigquery.storage.v1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto index f70c61c7246..77dbfe3eb26 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ 
-16,7 +16,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb"; option java_outer_classname = "ArrowProto"; option java_package = "com.google.cloud.bigquery.storage.v1beta1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto index 7d034a28a7e..222bdcc13f5 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -16,7 +16,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb"; option java_outer_classname = "AvroProto"; option java_package = "com.google.cloud.bigquery.storage.v1beta1"; diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 1ff8d8b5eb6..90fb7f3e492 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -16,7 +16,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb"; option java_package = "com.google.cloud.bigquery.storage.v1beta1"; // Options dictating how we read a table. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 0d311418a49..2bf2a2c094d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -27,7 +27,7 @@ import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb"; option java_package = "com.google.cloud.bigquery.storage.v1beta1"; // BigQuery storage API. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index 22c940c0e6b..9f643cd65e6 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -18,7 +18,7 @@ package google.cloud.bigquery.storage.v1beta1; import "google/protobuf/timestamp.proto"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb"; option java_outer_classname = "TableReferenceProto"; option java_package = "com.google.cloud.bigquery.storage.v1beta1"; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 7b61acc3084..5b394ec839c 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ 
-11,7 +11,7 @@ "v1": { "options": { "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1", - "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1;storage", + "go_package": "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb", "java_package": "com.google.cloud.bigquery.storage.v1", "java_multiple_files": true, "java_outer_classname": "TableProto", @@ -1084,7 +1084,7 @@ }, "v1beta1": { "options": { - "go_package": "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage", + "go_package": "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb", "java_outer_classname": "TableReferenceProto", "java_package": "com.google.cloud.bigquery.storage.v1beta1" }, From 8a886d1b6d4f7550794cf42e36a8d0ce4836a691 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Feb 2023 12:10:15 -0800 Subject: [PATCH 212/333] chore(gitignore): only ignore folders in the top level (#321) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update .gitignore to always include protos folder Use gapic-generator-typescript v3.0.0. 
PiperOrigin-RevId: 507004755 Source-Link: https://github.com/googleapis/googleapis/commit/d784f3c1043616fc0646e9ce7afa1b9161cc02de Source-Link: https://github.com/googleapis/googleapis-gen/commit/5e64ba8615f65fdedb1fcd6ac792e5ea621027e4 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWU2NGJhODYxNWY2NWZkZWRiMWZjZDZhYzc5MmU1ZWE2MjEwMjdlNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore(gitignore): only ignore folders in the top level PiperOrigin-RevId: 507603203 Source-Link: https://github.com/googleapis/googleapis/commit/a4f2de456480c0a4ed9feeeaa1f8ee620bbef23a Source-Link: https://github.com/googleapis/googleapis-gen/commit/dcf882154e7c710ecf2a1abc77b35c95f9062371 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGNmODgyMTU0ZTdjNzEwZWNmMmExYWJjNzdiMzVjOTVmOTA2MjM3MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.gitignore | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/.gitignore b/handwritten/bigquery-storage/.gitignore index 5d32b23782f..d4f03a0df2e 100644 --- a/handwritten/bigquery-storage/.gitignore +++ b/handwritten/bigquery-storage/.gitignore @@ -1,11 +1,11 @@ **/*.log **/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ +/.coverage +/coverage +/.nyc_output +/docs/ +/out/ +/build/ system-test/secrets.js system-test/*key.json *.lock From 071d141a2c17c7d83afdf990ba458ba62baaff77 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Feb 2023 16:16:53 -0800 Subject: [PATCH 213/333] feat: add default_value_expression to TableFieldSchema (#323) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add default_value_expression to 
TableFieldSchema PiperOrigin-RevId: 508443364 Source-Link: https://github.com/googleapis/googleapis/commit/fc7d513461f555d3de13d7a777b2f998cf8f5532 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6d4bae6a1c630c31c45b0a6d1c50d69b5dc87881 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmQ0YmFlNmExYzYzMGMzMWM0NWIwYTZkMWM1MGQ2OWI1ZGM4Nzg4MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/table.proto | 4 ++++ .../bigquery-storage/protos/protos.d.ts | 6 +++++ handwritten/bigquery-storage/protos/protos.js | 23 +++++++++++++++++++ .../bigquery-storage/protos/protos.json | 7 ++++++ 4 files changed, 40 insertions(+) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 56f7852a194..c9b62d7932e 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -164,4 +164,8 @@ message TableFieldSchema { // Optional. See documentation for precision. int64 scale = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A SQL expression to specify the [default value] + // (https://cloud.google.com/bigquery/docs/default-values) for this field. 
+ string default_value_expression = 10 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 399d28bdd8e..0056863719b 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4206,6 +4206,9 @@ export namespace google { /** TableFieldSchema scale */ scale?: (number|Long|string|null); + + /** TableFieldSchema defaultValueExpression */ + defaultValueExpression?: (string|null); } /** Represents a TableFieldSchema. */ @@ -4241,6 +4244,9 @@ export namespace google { /** TableFieldSchema scale. */ public scale: (number|Long|string); + /** TableFieldSchema defaultValueExpression. */ + public defaultValueExpression: string; + /** * Creates a new TableFieldSchema instance using the specified properties. * @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 730966bc2c4..71d111aeb5d 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -10009,6 +10009,7 @@ * @property {number|Long|null} [maxLength] TableFieldSchema maxLength * @property {number|Long|null} [precision] TableFieldSchema precision * @property {number|Long|null} [scale] TableFieldSchema scale + * @property {string|null} [defaultValueExpression] TableFieldSchema defaultValueExpression */ /** @@ -10091,6 +10092,14 @@ */ TableFieldSchema.prototype.scale = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + /** + * TableFieldSchema defaultValueExpression. + * @member {string} defaultValueExpression + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.defaultValueExpression = ""; + /** * Creates a new TableFieldSchema instance using the specified properties. 
* @function create @@ -10132,6 +10141,8 @@ writer.uint32(/* id 8, wireType 0 =*/64).int64(message.precision); if (message.scale != null && Object.hasOwnProperty.call(message, "scale")) writer.uint32(/* id 9, wireType 0 =*/72).int64(message.scale); + if (message.defaultValueExpression != null && Object.hasOwnProperty.call(message, "defaultValueExpression")) + writer.uint32(/* id 10, wireType 2 =*/82).string(message.defaultValueExpression); return writer; }; @@ -10200,6 +10211,10 @@ message.scale = reader.int64(); break; } + case 10: { + message.defaultValueExpression = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -10291,6 +10306,9 @@ if (message.scale != null && message.hasOwnProperty("scale")) if (!$util.isInteger(message.scale) && !(message.scale && $util.isInteger(message.scale.low) && $util.isInteger(message.scale.high))) return "scale: integer|Long expected"; + if (message.defaultValueExpression != null && message.hasOwnProperty("defaultValueExpression")) + if (!$util.isString(message.defaultValueExpression)) + return "defaultValueExpression: string expected"; return null; }; @@ -10443,6 +10461,8 @@ message.scale = object.scale; else if (typeof object.scale === "object") message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); + if (object.defaultValueExpression != null) + message.defaultValueExpression = String(object.defaultValueExpression); return message; }; @@ -10481,6 +10501,7 @@ object.scale = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else object.scale = options.longs === String ? "0" : 0; + object.defaultValueExpression = ""; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -10510,6 +10531,8 @@ object.scale = options.longs === String ? String(message.scale) : message.scale; else object.scale = options.longs === String ? 
$util.Long.prototype.toString.call(message.scale) : options.longs === Number ? new $util.LongBits(message.scale.low >>> 0, message.scale.high >>> 0).toNumber() : message.scale; + if (message.defaultValueExpression != null && message.hasOwnProperty("defaultValueExpression")) + object.defaultValueExpression = message.defaultValueExpression; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 5b394ec839c..753ce42499c 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1047,6 +1047,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "defaultValueExpression": { + "type": "string", + "id": 10, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } }, "nested": { From 289ddd7b5b18c96eb885fe6c246aa139bd34cff2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 21 Feb 2023 21:26:49 -0800 Subject: [PATCH 214/333] docs: changing format of the jsdoc links (#325) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: changing format of the jsdoc links PiperOrigin-RevId: 509352615 Source-Link: https://github.com/googleapis/googleapis/commit/b737d30dae27222d86fa340ecb99292df4585762 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8efadf3d58780ea1c550268d46a3dc701ba37fcf Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGVmYWRmM2Q1ODc4MGVhMWM1NTAyNjhkNDZhM2RjNzAxYmEzN2ZjZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Daniel Bankhead --- .../src/v1/big_query_read_client.ts | 6 +++--- .../src/v1/big_query_write_client.ts | 14 +++++++------- .../src/v1beta1/big_query_storage_client.ts | 10 +++++----- 3 files changed, 15 insertions(+), 15 deletions(-) 
diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 54c280fd703..92513560075 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -403,7 +403,7 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1.ReadSession}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.ReadSession | ReadSession}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -519,7 +519,7 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse | SplitReadStreamResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -627,7 +627,7 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
* @returns {Stream} - * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1.ReadRowsResponse} on 'data' event. + * An object stream which emits {@link google.cloud.bigquery.storage.v1.ReadRowsResponse | ReadRowsResponse} on 'data' event. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) * for more details and examples. diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 2743d37054b..225fe7a107e 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -377,7 +377,7 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.WriteStream | WriteStream}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -478,7 +478,7 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WriteStream]{@link google.cloud.bigquery.storage.v1.WriteStream}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.WriteStream | WriteStream}. 
* Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -577,7 +577,7 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [FinalizeWriteStreamResponse]{@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse | FinalizeWriteStreamResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -682,7 +682,7 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCommitWriteStreamsResponse]{@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse | BatchCommitWriteStreamsResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -793,7 +793,7 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [FlushRowsResponse]{@link google.cloud.bigquery.storage.v1.FlushRowsResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.FlushRowsResponse | FlushRowsResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -912,8 +912,8 @@ export class BigQueryWriteClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which is both readable and writable. It accepts objects - * representing [AppendRowsRequest]{@link google.cloud.bigquery.storage.v1.AppendRowsRequest} for write() method, and - * will emit objects representing [AppendRowsResponse]{@link google.cloud.bigquery.storage.v1.AppendRowsResponse} on 'data' event asynchronously. + * representing {@link google.cloud.bigquery.storage.v1.AppendRowsRequest | AppendRowsRequest} for write() method, and + * will emit objects representing {@link google.cloud.bigquery.storage.v1.AppendRowsResponse | AppendRowsResponse} on 'data' event asynchronously. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) * for more details and examples. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 29b8e225100..7fa7c8e90fe 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -391,7 +391,7 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
* @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [ReadSession]{@link google.cloud.bigquery.storage.v1beta1.ReadSession}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.ReadSession | ReadSession}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -496,7 +496,7 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [BatchCreateReadSessionStreamsResponse]{@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse | BatchCreateReadSessionStreamsResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -610,7 +610,7 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * The first element of the array is an object representing {@link google.protobuf.Empty | Empty}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. 
@@ -727,7 +727,7 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [SplitReadStreamResponse]{@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse}. + * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse | SplitReadStreamResponse}. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) * for more details and examples. @@ -835,7 +835,7 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} - * An object stream which emits [ReadRowsResponse]{@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse} on 'data' event. + * An object stream which emits {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse | ReadRowsResponse} on 'data' event. * Please see the * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) * for more details and examples. 
From 58d2c81d2c4547d34549ff8f1fc9c9d3bb5868f8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 11:55:15 -0700 Subject: [PATCH 215/333] chore: store nodejs build artifacts in placer (#329) Source-Link: https://github.com/googleapis/synthtool/commit/3602660ae703daadcb7bc2f87bf601241665f3f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:e6d785d6de3cab027f6213d95ccedab4cab3811b0d3172b78db2216faa182e32 Co-authored-by: Owl Bot --- .../bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/.kokoro/publish.sh | 14 +++++++++++++- .../bigquery-storage/.kokoro/release/publish.cfg | 12 ++++++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 788f7a9fdff..0b836e11907 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:fe04ae044dadf5ad88d979dbcc85e0e99372fb5d6316790341e6aca5e4e3fbc8 + digest: sha256:e6d785d6de3cab027f6213d95ccedab4cab3811b0d3172b78db2216faa182e32 diff --git a/handwritten/bigquery-storage/.kokoro/publish.sh b/handwritten/bigquery-storage/.kokoro/publish.sh index 949e3e1d0c2..ca1d47af347 100755 --- a/handwritten/bigquery-storage/.kokoro/publish.sh +++ b/handwritten/bigquery-storage/.kokoro/publish.sh @@ -27,4 +27,16 @@ NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc npm install -npm publish --access=public --registry=https://wombat-dressing-room.appspot.com +npm pack . +# npm provides no way to specify, observe, or predict the name of the tarball +# file it generates. We have to look in the current directory for the freshest +# .tgz file. +TARBALL=$(ls -1 -t *.tgz | head -1) + +npm publish --access=public --registry=https://wombat-dressing-room.appspot.com "$TARBALL" + +# Kokoro collects *.tgz and package-lock.json files and stores them in Placer +# so we can generate SBOMs and attestations. +# However, we *don't* want Kokoro to collect package-lock.json and *.tgz files +# that happened to be installed with dependencies. +find node_modules -name package-lock.json -o -name "*.tgz" | xargs rm -f \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index ba6547f468f..2a5fbd3f7c9 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -37,3 +37,15 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/nodejs-bigquery-storage/.kokoro/publish.sh" } + +# Store the packages we uploaded to npmjs.org and their corresponding +# package-lock.jsons in Placer. 
That way, we have a record of exactly +# what we published, and which version of which tools we used to publish +# it, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/**/*.tgz" + regex: "github/**/package-lock.json" + strip_prefix: "github" + } +} From e8f5e88aecc8cf3786ae6be5fba973d5b1eb3c6b Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 22 Mar 2023 14:17:16 -0700 Subject: [PATCH 216/333] feat: adapt package to convert TableSchema to ProtoDescriptor (#326) --- handwritten/bigquery-storage/.gitignore | 1 + handwritten/bigquery-storage/README.md | 1 + .../bigquery-storage/src/adapt/index.ts | 20 + .../bigquery-storage/src/adapt/proto.ts | 275 +++++++++++ .../src/adapt/proto_mappings.ts | 113 +++++ .../bigquery-storage/src/adapt/schema.ts | 113 +++++ .../src/adapt/schema_mappings.ts | 63 +++ handwritten/bigquery-storage/src/index.ts | 2 + .../bigquery-storage/test/adapt/proto.ts | 442 ++++++++++++++++++ .../bigquery-storage/test/adapt/schema.ts | 156 +++++++ 10 files changed, 1186 insertions(+) create mode 100644 handwritten/bigquery-storage/src/adapt/index.ts create mode 100644 handwritten/bigquery-storage/src/adapt/proto.ts create mode 100644 handwritten/bigquery-storage/src/adapt/proto_mappings.ts create mode 100644 handwritten/bigquery-storage/src/adapt/schema.ts create mode 100644 handwritten/bigquery-storage/src/adapt/schema_mappings.ts create mode 100644 handwritten/bigquery-storage/test/adapt/proto.ts create mode 100644 handwritten/bigquery-storage/test/adapt/schema.ts diff --git a/handwritten/bigquery-storage/.gitignore b/handwritten/bigquery-storage/.gitignore index d4f03a0df2e..013e7d2ee2f 100644 --- a/handwritten/bigquery-storage/.gitignore +++ b/handwritten/bigquery-storage/.gitignore @@ -1,6 +1,7 @@ **/*.log **/node_modules /.coverage +samples/.coverage /coverage /.nyc_output /docs/ diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 
a83b819122b..d7dccea8252 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -187,6 +187,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | --------------------------- | --------------------------------- | ------ | | Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | | Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | +| Append_rows_table_to_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_table_to_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_table_to_proto2.js,samples/README.md) | | Customer_record_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/customer_record_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/customer_record_pb.js,samples/README.md) | | BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | | Sample_data_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb.js,samples/README.md) | diff --git a/handwritten/bigquery-storage/src/adapt/index.ts b/handwritten/bigquery-storage/src/adapt/index.ts new file mode 100644 index 00000000000..8a04df1e152 --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/index.ts @@ -0,0 +1,20 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +export { + convertStorageSchemaToProto2Descriptor, + normalizeDescriptor, +} from './proto'; + +export {convertBigQuerySchemaToStorageTableSchema} from './schema'; diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts new file mode 100644 index 00000000000..0602d7d1293 --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -0,0 +1,275 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as protos from '../../protos/protos'; +import {bqTypeToFieldTypeMap, convertModeToLabel} from './proto_mappings'; + +type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; +type TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.ITableFieldSchema; +type FieldDescriptorProto = protos.google.protobuf.IFieldDescriptorProto; +type FileDescriptorProto = protos.google.protobuf.IFileDescriptorProto; +type FileDescriptorSet = protos.google.protobuf.FileDescriptorSet; +type DescriptorProto = protos.google.protobuf.DescriptorProto; +type FieldDescriptorProtoType = + protos.google.protobuf.IFieldDescriptorProto['type']; +type FieldDescriptorProtoLabel = + protos.google.protobuf.IFieldDescriptorProto['label']; + +const TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema; +const DescriptorProto = protos.google.protobuf.DescriptorProto; +const FieldDescriptorProto = protos.google.protobuf.FieldDescriptorProto; +const FileDescriptorProto = protos.google.protobuf.FileDescriptorProto; +const FileDescriptorSet = protos.google.protobuf.FileDescriptorSet; + +const packedTypes: FieldDescriptorProtoType[] = [ + FieldDescriptorProto.Type.TYPE_INT32, + FieldDescriptorProto.Type.TYPE_INT64, + FieldDescriptorProto.Type.TYPE_UINT32, + FieldDescriptorProto.Type.TYPE_UINT64, + FieldDescriptorProto.Type.TYPE_SINT32, + FieldDescriptorProto.Type.TYPE_SINT64, + FieldDescriptorProto.Type.TYPE_FIXED32, + FieldDescriptorProto.Type.TYPE_FIXED64, + FieldDescriptorProto.Type.TYPE_SFIXED32, + 
FieldDescriptorProto.Type.TYPE_SFIXED64, + FieldDescriptorProto.Type.TYPE_FLOAT, + FieldDescriptorProto.Type.TYPE_DOUBLE, + FieldDescriptorProto.Type.TYPE_BOOL, + FieldDescriptorProto.Type.TYPE_ENUM, +]; + +/** Builds a DescriptorProto for a given table schema using proto2 syntax. + * @param schema - a BigQuery Storage TableSchema. + * @param scope - scope to namespace protobuf structs. + * @returns DescriptorProto + */ +export function convertStorageSchemaToProto2Descriptor( + schema: TableSchema, + scope: string +): DescriptorProto { + const fds = convertStorageSchemaToFileDescriptorInternal( + schema, + scope, + false + ); + return normalizeDescriptorSet(fds); +} + +/** Builds a DescriptorProto for a given table schema using proto3 syntax. + * @param schema - a Bigquery TableSchema. + * @param scope - scope to namespace protobuf structs. + * @returns DescriptorProto + */ +export function convertStorageSchemaToProto3Descriptor( + schema: TableSchema, + scope: string +): DescriptorProto { + const fds = convertStorageSchemaToFileDescriptorInternal(schema, scope, true); + return normalizeDescriptorSet(fds); +} + +function convertStorageSchemaToFileDescriptorInternal( + schema: TableSchema, + scope: string, + useProto3: boolean +): FileDescriptorSet { + let fNumber = 0; + const fields: FieldDescriptorProto[] = []; + const deps = new Map(); + for (const field of schema.fields ?? 
[]) { + fNumber += 1; + const currentScope = `${scope}_${field.name}`; + if (field.type === TableFieldSchema.Type.STRUCT) { + const subSchema: TableSchema = { + fields: field.fields, + }; + const fd = convertStorageSchemaToFileDescriptorInternal( + subSchema, + currentScope, + useProto3 + ); + for (const f of fd.file) { + if (f.name) { + deps.set(f.name, f); + } + } + const fdp = convertTableFieldSchemaToFieldDescriptorProto( + field, + fNumber, + currentScope, + useProto3 + ); + fields.push(fdp); + } else { + const fdp = convertTableFieldSchemaToFieldDescriptorProto( + field, + fNumber, + currentScope, + useProto3 + ); + fields.push(fdp); + } + } + + const dp = new DescriptorProto({ + name: scope, + field: fields, + }); + + const depsNames: string[] = Array.from(deps.keys()); + const syntax = useProto3 ? 'proto3' : 'proto2'; + const fdp = new FileDescriptorProto({ + messageType: [dp], + name: `${scope}.proto`, + syntax, + dependency: depsNames, + }); + + const fds = new FileDescriptorSet({ + file: [fdp, ...Array.from(deps.values())], + }); + + return fds; +} + +function normalizeDescriptorSet(fds: FileDescriptorSet): DescriptorProto { + let dp: DescriptorProto | null = null; + let fdpName; + if (fds.file.length > 0) { + // search root descriptor + const fdp = fds.file[0]; + fdpName = fdp.name; + if (fdp.messageType && fdp.messageType.length > 0) { + dp = new DescriptorProto(fdp.messageType[0]); + } + } + if (!dp) { + throw Error('root descriptor not found'); + } + for (const fdp of fds.file) { + if (fdp.name === fdpName) { + continue; + } + if (!dp.nestedType) { + dp.nestedType = []; + } + if (!fdp.messageType) { + continue; + } + for (const nestedDP of fdp.messageType) { + dp.nestedType.push(normalizeDescriptor(new DescriptorProto(nestedDP))); + } + } + return normalizeDescriptor(dp); +} + +/** + * Builds a self-contained DescriptorProto suitable for communicating schema + * information with the BigQuery Storage write API. 
It's primarily used for cases where users are + * interested in sending data using a predefined protocol buffer message. + * @param dp - DescriptorProto to be bundled. + * @return DescriptorProto + */ +export function normalizeDescriptor(dp: DescriptorProto): DescriptorProto { + dp.name = normalizeName(dp.name); + for (const f of dp.field) { + if (f.proto3Optional) { + f.proto3Optional = null; + } + if (f.oneofIndex) { + f.oneofIndex = null; + } + if (f.options) { + f.options.packed = shouldPackType(f.type, f.label, false); + } + } + const normalizedNestedTypes = []; + for (const nestedDP of dp.nestedType) { + normalizedNestedTypes.push( + normalizeDescriptor(new DescriptorProto(nestedDP)) + ); + } + dp.nestedType = normalizedNestedTypes; + return dp; +} + +function normalizeName(name: string): string { + return name.replace(/\./, '_'); +} + +function convertTableFieldSchemaToFieldDescriptorProto( + field: TableFieldSchema, + fNumber: number, + scope: string, + useProto3: boolean +): FieldDescriptorProto { + const name = `${field.name}`.toLowerCase(); + const type = field.type; + if (!type) { + throw Error(`table field ${name} missing type`); + } + const label = convertModeToLabel(field.mode, useProto3); + let fdp: FieldDescriptorProto; + if (type === TableFieldSchema.Type.STRUCT) { + fdp = new FieldDescriptorProto({ + name: name, + number: fNumber, + type: FieldDescriptorProto.Type.TYPE_MESSAGE, + typeName: scope, + label: label, + }); + } else { + const pType = bqTypeToFieldTypeMap[type]; + if (pType === null) { + throw Error(`table field type ${type} not supported`); + } + fdp = new FieldDescriptorProto({ + name: field.name, + number: fNumber, + type: pType, + label: label, + options: { + packed: shouldPackType(pType, label, useProto3), + }, + proto3Optional: isProto3Optional(label, useProto3), + }); + } + return fdp; +} + +function shouldPackType( + t: FieldDescriptorProtoType, + label: FieldDescriptorProtoLabel | null, + useProto3: boolean +): boolean | 
undefined { + if (useProto3) { + return false; + } + if (label !== FieldDescriptorProto.Label.LABEL_REPEATED) { + return undefined; + } + return packedTypes.includes(t); +} + +function isProto3Optional( + label: FieldDescriptorProtoLabel | null, + useProto3: boolean +): boolean | undefined { + if (!useProto3) { + return undefined; + } + return label === FieldDescriptorProto.Label.LABEL_OPTIONAL; +} diff --git a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts new file mode 100644 index 00000000000..b0f7d90e15a --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts @@ -0,0 +1,113 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as protos from '../../protos/protos'; + +type TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.ITableFieldSchema; +type FieldDescriptorProto = protos.google.protobuf.IFieldDescriptorProto; +type FieldDescriptorProtoType = + protos.google.protobuf.FieldDescriptorProto['type']; +type TableFieldSchemaType = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema['type']; +type TableFieldSchemaMode = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema['mode']; +type FieldDescriptorProtoLabel = + protos.google.protobuf.FieldDescriptorProto.Label; + +const TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema; +const FieldDescriptorProto = protos.google.protobuf.FieldDescriptorProto; + +// Reference https://cloud.google.com/bigquery/docs/write-api#data_type_conversions +export const bqTypeToFieldTypeMap: Record< + TableFieldSchemaType, + FieldDescriptorProtoType | null +> = { + [TableFieldSchema.Type.BIGNUMERIC]: FieldDescriptorProto.Type.TYPE_STRING, + BIGNUMERIC: FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.BOOL]: FieldDescriptorProto.Type.TYPE_BOOL, + BOOL: FieldDescriptorProto.Type.TYPE_BOOL, + [TableFieldSchema.Type.BYTES]: FieldDescriptorProto.Type.TYPE_BYTES, + BYTES: FieldDescriptorProto.Type.TYPE_BYTES, + [TableFieldSchema.Type.DATE]: FieldDescriptorProto.Type.TYPE_INT32, + DATE: FieldDescriptorProto.Type.TYPE_INT32, + [TableFieldSchema.Type.DATETIME]: FieldDescriptorProto.Type.TYPE_STRING, + DATETIME: FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.DOUBLE]: FieldDescriptorProto.Type.TYPE_DOUBLE, + DOUBLE: FieldDescriptorProto.Type.TYPE_DOUBLE, + [TableFieldSchema.Type.GEOGRAPHY]: FieldDescriptorProto.Type.TYPE_STRING, + GEOGRAPHY: FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.INT64]: FieldDescriptorProto.Type.TYPE_INT64, + INT64: FieldDescriptorProto.Type.TYPE_INT64, + [TableFieldSchema.Type.NUMERIC]: FieldDescriptorProto.Type.TYPE_STRING, + NUMERIC: 
FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.STRING]: FieldDescriptorProto.Type.TYPE_STRING, + STRING: FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.STRUCT]: FieldDescriptorProto.Type.TYPE_MESSAGE, + STRUCT: FieldDescriptorProto.Type.TYPE_MESSAGE, + [TableFieldSchema.Type.TIME]: FieldDescriptorProto.Type.TYPE_STRING, + TIME: FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.TIMESTAMP]: FieldDescriptorProto.Type.TYPE_INT64, + TIMESTAMP: FieldDescriptorProto.Type.TYPE_INT64, + [TableFieldSchema.Type.JSON]: FieldDescriptorProto.Type.TYPE_STRING, + JSON: protos.google.protobuf.FieldDescriptorProto.Type.TYPE_STRING, + [TableFieldSchema.Type.TYPE_UNSPECIFIED]: null, + TYPE_UNSPECIFIED: null, + [TableFieldSchema.Type.INTERVAL]: null, + INTERVAL: null, +}; + +export const bqModeToFieldLabelMapProto2: Record< + TableFieldSchemaMode, + FieldDescriptorProtoLabel | null +> = { + [TableFieldSchema.Mode.NULLABLE]: FieldDescriptorProto.Label.LABEL_OPTIONAL, + NULLABLE: FieldDescriptorProto.Label.LABEL_OPTIONAL, + [TableFieldSchema.Mode.REPEATED]: FieldDescriptorProto.Label.LABEL_REPEATED, + REPEATED: FieldDescriptorProto.Label.LABEL_REPEATED, + [TableFieldSchema.Mode.REQUIRED]: FieldDescriptorProto.Label.LABEL_REQUIRED, + REQUIRED: FieldDescriptorProto.Label.LABEL_REQUIRED, + [TableFieldSchema.Mode.MODE_UNSPECIFIED]: null, + MODE_UNSPECIFIED: null, +}; + +export const bqModeToFieldLabelMapProto3: Record< + TableFieldSchemaMode, + FieldDescriptorProtoLabel | null +> = { + [TableFieldSchema.Mode.NULLABLE]: FieldDescriptorProto.Label.LABEL_OPTIONAL, + NULLABLE: FieldDescriptorProto.Label.LABEL_OPTIONAL, + [protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.REPEATED]: + FieldDescriptorProto.Label.LABEL_REPEATED, + REPEATED: FieldDescriptorProto.Label.LABEL_REPEATED, + [protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.REQUIRED]: + FieldDescriptorProto.Label.LABEL_OPTIONAL, + REQUIRED: 
FieldDescriptorProto.Label.LABEL_REQUIRED, + [protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode + .MODE_UNSPECIFIED]: null, + MODE_UNSPECIFIED: null, +}; + +export function convertModeToLabel( + mode: TableFieldSchema['mode'], + useProto3: Boolean +): FieldDescriptorProtoLabel | null { + if (!mode) { + return null; + } + return useProto3 + ? bqModeToFieldLabelMapProto3[mode] + : bqModeToFieldLabelMapProto2[mode]; +} diff --git a/handwritten/bigquery-storage/src/adapt/schema.ts b/handwritten/bigquery-storage/src/adapt/schema.ts new file mode 100644 index 00000000000..cd276aa2365 --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/schema.ts @@ -0,0 +1,113 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as protos from '../../protos/protos'; +import {fieldTypeMap, modeMap} from './schema_mappings'; + +type ITableSchema = { + /** + * Describes the fields in a table. + */ + fields?: Array; +}; +type ITableFieldSchema = { + /** + * [Optional] The field description. The maximum length is 1,024 characters. + */ + description?: string; + /** + * [Optional] Describes the nested schema fields if the type property is set to RECORD. + */ + fields?: Array; + /** + * [Optional] The field mode. Possible values include NULLABLE, REQUIRED and REPEATED. The default value is NULLABLE. + */ + mode?: string; + /** + * [Required] The field name. 
The name must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a letter or underscore. The maximum length is 300 characters. + */ + name?: string; + /** + * [Required] The field data type. Possible values include STRING, BYTES, INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), NUMERIC, BIGNUMERIC, BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, INTERVAL, RECORD (where RECORD indicates that the field contains a nested schema) or STRUCT (same as RECORD). + */ + type?: string; +}; +type StorageTableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; +type StorageTableField = + protos.google.cloud.bigquery.storage.v1.ITableFieldSchema; + +const StorageTableSchema = protos.google.cloud.bigquery.storage.v1.TableSchema; +const StorageTableField = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema; + +/** + * Converts a bigquery Schema into the protobuf-based TableSchema used + * by the BigQuery Storage WriteClient. + * @param schema - a BigQuery TableSchema + * @return StorageTableSchema + */ +export function convertBigQuerySchemaToStorageTableSchema( + schema: ITableSchema +): StorageTableSchema { + const out: StorageTableSchema = {}; + for (const field of schema.fields ?? 
[]) { + const converted = bqFieldToStorageField(field); + if (!converted) { + throw Error(`failed to convert field ${field.name}`); + } + if (!out.fields) { + out.fields = []; + } + out.fields.push(converted); + } + return out; +} + +function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { + const out: StorageTableField = { + name: field.name, + }; + + if (field.description) { + out.description = field.description; + } + + if (!field.type) { + throw Error( + `could not convert field (${field.name}) due to unknown type value: ${field.type}` + ); + } + + const ftype = fieldTypeMap[field.type]; + if (!ftype) { + throw Error( + `could not convert field (${field.name}) due to unknown type value: ${field.type}` + ); + } + out.type = ftype; + + out.mode = StorageTableField.Mode.NULLABLE; + if (field.mode) { + out.mode = modeMap[field.mode]; + } + + for (const subField of field.fields ?? []) { + const converted = bqFieldToStorageField(subField); + if (!out.fields) { + out.fields = []; + } + out.fields.push(converted); + } + return out; +} diff --git a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts new file mode 100644 index 00000000000..040d631c666 --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts @@ -0,0 +1,63 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as protos from '../../protos/protos'; +type StorageTableField = + protos.google.cloud.bigquery.storage.v1.ITableFieldSchema; +type StorageTableFieldType = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type; + +const StorageTableField = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema; +const StorageTableFieldType = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type; + +export const fieldTypeMap: Record = { + STRING: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.STRING, + BYTES: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.BYTES, + INTEGER: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.INT64, + INT64: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.INT64, + FLOAT: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.DOUBLE, + FLOAT64: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.DOUBLE, + NUMERIC: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.NUMERIC, + BIGNUMERIC: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.BIGNUMERIC, + BOOLEAN: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.BOOL, + BOOL: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.BOOL, + TIMESTAMP: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.TIMESTAMP, + DATE: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.DATE, + TIME: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.TIME, + DATETIME: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.DATETIME, + INTERVAL: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.INTERVAL, + RECORD: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.STRUCT, + STRUCT: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.STRUCT, + JSON: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.JSON, + GEOGRAPHY: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.GEOGRAPHY, 
+}; + +export const modeMap: Record = { + NULLABLE: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.NULLABLE, + REPEATED: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.REPEATED, + REQUIRED: + protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.REQUIRED, + '': protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode + .MODE_UNSPECIFIED, +}; diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 8d6b763438d..cbf98da7c50 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -36,3 +36,5 @@ export { export default {v1, BigQueryReadClient, BigQueryWriteClient}; import * as protos from '../protos/protos'; export {protos}; +import * as adapt from './adapt'; +export {adapt}; diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts new file mode 100644 index 00000000000..0c5c5ff605a --- /dev/null +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -0,0 +1,442 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +import {protobuf} from 'google-gax'; +import * as adapt from '../../src/adapt'; +import * as messagesJSON from '../../samples/testdata/messages.json'; +import * as protos from '../../protos/protos'; + +const DescriptorProto = protos.google.protobuf.DescriptorProto; +const {Root, Type} = protobuf; + +describe('Adapt Protos', () => { + describe('Schema to Proto Descriptor conversion', () => { + it('basic', () => { + const schema = { + fields: [ + { + name: 'foo', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'bar', + type: 'FLOAT', + mode: 'REQUIRED', + }, + { + name: 'baz', + type: 'STRING', + mode: 'REPEATED', + }, + { + name: 'bat', + type: 'BOOL', + mode: 'REPEATED', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Test' + ); + assert.notEqual(protoDescriptor, null); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + const TestProto = (Type as any).fromDescriptor(protoDescriptor); + const raw = { + foo: 'name', + bar: 42, + baz: ['A', 'B'], + bat: [true, false], + }; + const serialized = TestProto.encode(raw).finish(); + const decoded = TestProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); + + it('nested struct', () => { + const schema = { + fields: [ + { + name: 'record_id', + type: 'INT64', + mode: 'NULLABLE', + }, + { + name: 'details', + type: 'STRUCT', + mode: 'REPEATED', + fields: [ + { + name: 'key', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'value', + type: 'STRING', + mode: 'NULLABLE', + }, + ], + }, + { + name: 'metadata', + type: 'STRUCT', + mode: 'NULLABLE', + fields: [ + { + name: 'createdAt', + type: 'TIMESTAMP', + mode: 'REQUIRED', + }, + { + name: 'updatedAt', + type: 'TIMESTAMP', + mode: 'NULLABLE', + }, + ], + }, + ], + }; + const storageSchema = + 
adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Nested' + ); + assert.notEqual(protoDescriptor, null); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + assert.deepEqual(JSON.parse(JSON.stringify(protoDescriptor)), { + name: 'Nested', + field: [ + { + name: 'record_id', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: {}, + }, + { + name: 'details', + number: 2, + label: 'LABEL_REPEATED', + type: 'TYPE_MESSAGE', + typeName: 'Nested_details', + }, + { + name: 'metadata', + number: 3, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'Nested_metadata', + }, + ], + nestedType: [ + { + name: 'Nested_details', + field: [ + { + name: 'key', + number: 1, + label: 'LABEL_REQUIRED', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'value', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + ], + }, + { + name: 'Nested_metadata', + field: [ + { + name: 'createdAt', + number: 1, + label: 'LABEL_REQUIRED', + type: 'TYPE_INT64', + options: {}, + }, + { + name: 'updatedAt', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: {}, + }, + ], + }, + ], + }); + const NestedProto = (Type as any).fromDescriptor(protoDescriptor); + const raw = { + record_id: '12345', + details: [ + {key: 'name', value: 'jimmy'}, + {key: 'title', value: 'clown'}, + ], + metadata: { + createdAt: Date.now(), + updatedAt: Date.now(), + }, + }; + const serialized = NestedProto.encode(raw).finish(); + const decoded = NestedProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); + }); + + describe('Proto descriptor normalization', () => { + it('bundle multiple proto descriptors into one', () => { + const root = Root.fromJSON(messagesJSON).resolveAll(); + const descriptor = root + .lookupType('testdata.GithubArchiveMessage') + 
.add(root.lookupType('testdata.GithubArchiveRepo')) + .add(root.lookupType('testdata.GithubArchiveEntity')) + .toDescriptor('proto2'); + const normalized = adapt + .normalizeDescriptor(new DescriptorProto(descriptor)) + .toJSON(); + assert.deepEqual(normalized, { + name: 'GithubArchiveMessage', + field: [ + { + name: 'type', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'public', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_BOOL', + options: {}, + }, + { + name: 'payload', + number: 3, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'repo', + number: 4, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'GithubArchiveRepo', + options: {}, + }, + { + name: 'actor', + number: 5, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'GithubArchiveEntity', + options: {}, + }, + { + name: 'org', + number: 6, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'GithubArchiveEntity', + options: {}, + }, + { + name: 'created_at', + number: 7, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: {}, + }, + { + name: 'id', + number: 8, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'other', + number: 9, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + ], + nestedType: [ + { + name: 'GithubArchiveRepo', + field: [ + { + name: 'id', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: {}, + }, + { + name: 'name', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'url', + number: 3, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + ], + }, + { + name: 'GithubArchiveEntity', + field: [ + { + name: 'id', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: {}, + }, + { + name: 'login', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 
'gravatar_id', + number: 3, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'avatar_url', + number: 4, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'url', + number: 5, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + ], + }, + ], + }); + }); + it('nested proto with enum', () => { + const root = Root.fromJSON(messagesJSON).resolveAll(); + const descriptor = root + .lookupType('testdata.ExternalEnumMessage') + .add(root.lookupType('testdata.EnumMsgA')) + .add(root.lookupType('testdata.EnumMsgB')) + .add(root.lookupEnum('testdata.ExtEnum')) + .toDescriptor('proto2'); + const normalized = adapt + .normalizeDescriptor(new DescriptorProto(descriptor)) + .toJSON(); + assert.deepEqual(normalized, { + name: 'ExternalEnumMessage', + field: [ + { + name: 'msg_a', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'EnumMsgA', + options: {}, + }, + { + name: 'msg_b', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_MESSAGE', + typeName: 'EnumMsgB', + options: {}, + }, + ], + nestedType: [ + { + name: 'EnumMsgA', + field: [ + { + name: 'foo', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + { + name: 'bar', + number: 2, + label: 'LABEL_OPTIONAL', + type: 'TYPE_ENUM', + typeName: 'ExtEnum', + options: {}, + }, + ], + }, + { + name: 'EnumMsgB', + field: [ + { + name: 'baz', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_ENUM', + typeName: 'ExtEnum', + options: {}, + }, + ], + }, + ], + enumType: [ + { + name: 'ExtEnum', + value: [ + {name: 'UNDEFINED', number: 0}, + {name: 'THING', number: 1}, + {name: 'OTHER_THING', number: 2}, + ], + }, + ], + }); + }); + }); +}); diff --git a/handwritten/bigquery-storage/test/adapt/schema.ts b/handwritten/bigquery-storage/test/adapt/schema.ts new file mode 100644 index 00000000000..6c3568ccf45 --- /dev/null +++ b/handwritten/bigquery-storage/test/adapt/schema.ts @@ -0,0 +1,156 @@ 
+// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {google} from '../../protos/protos'; +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +import * as adapt from '../../src/adapt'; + +const TableFieldSchema = google.cloud.bigquery.storage.v1.TableFieldSchema; + +describe('Adapt Schemas', () => { + describe('BigQuery Schema to Storage Schema', () => { + it('basic schema', () => { + const schema = { + fields: [ + {name: 'f1', type: 'STRING', description: 'first field'}, + {name: 'f2', type: 'INTEGER', description: 'second field'}, + {name: 'f3', type: 'BOOL', description: 'third field'}, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + assert.notEqual(storageSchema, null); + if (!storageSchema) { + throw Error('null storage schema'); + } + assert.deepEqual(storageSchema, { + fields: [ + { + name: 'f1', + description: 'first field', + type: TableFieldSchema.Type.STRING, + mode: TableFieldSchema.Mode.NULLABLE, + }, + { + name: 'f2', + description: 'second field', + type: TableFieldSchema.Type.INT64, + mode: TableFieldSchema.Mode.NULLABLE, + }, + { + name: 'f3', + description: 'third field', + type: TableFieldSchema.Type.BOOL, + mode: TableFieldSchema.Mode.NULLABLE, + }, + ], + }); + }); + it('arrays', () => { + const schema = { + fields: [ + { + name: 'arr', + type: 'NUMERIC', + mode: 'REPEATED', + description: 'array field', + }, + { + name: 'big', + type: 
'BIGNUMERIC', + mode: 'REQUIRED', + description: 'required big', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + assert.notEqual(storageSchema, null); + if (!storageSchema) { + throw Error('null storage schema'); + } + assert.deepEqual(storageSchema, { + fields: [ + { + name: 'arr', + description: 'array field', + type: TableFieldSchema.Type.NUMERIC, + mode: TableFieldSchema.Mode.REPEATED, + }, + { + name: 'big', + description: 'required big', + type: TableFieldSchema.Type.BIGNUMERIC, + mode: TableFieldSchema.Mode.REQUIRED, + }, + ], + }); + }); + it('nested structs', () => { + const schema = { + fields: [ + { + name: 'struct1', + type: 'RECORD', + description: 'struct field', + fields: [ + {name: 'leaf1', type: 'DATE'}, + {name: 'leaf2', type: 'DATETIME'}, + ], + }, + { + name: 'field2', + type: 'STRING', + description: 'second field', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + assert.notEqual(storageSchema, null); + if (!storageSchema) { + throw Error('null storage schema'); + } + assert.deepEqual(storageSchema, { + fields: [ + { + name: 'struct1', + description: 'struct field', + type: TableFieldSchema.Type.STRUCT, + mode: TableFieldSchema.Mode.NULLABLE, + fields: [ + { + name: 'leaf1', + type: TableFieldSchema.Type.DATE, + mode: TableFieldSchema.Mode.NULLABLE, + }, + { + name: 'leaf2', + type: TableFieldSchema.Type.DATETIME, + mode: TableFieldSchema.Mode.NULLABLE, + }, + ], + }, + { + name: 'field2', + description: 'second field', + type: TableFieldSchema.Type.STRING, + mode: TableFieldSchema.Mode.NULLABLE, + }, + ], + }); + }); + }); +}); From 890cbd3846cb104088b7102b828ac70536f43522 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 09:49:34 -0400 Subject: [PATCH 217/333] docs: update documentation for bigquerystorage v1beta1 (#337) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: update documentation for bigquerystorage v1beta1 PiperOrigin-RevId: 531289380 Source-Link: https://github.com/googleapis/googleapis/commit/3d497fa928b06e86847975e6cb9932b3701eb2c7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1e50499897357054d51c9c7882afd874b79f657b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWU1MDQ5OTg5NzM1NzA1NGQ1MWM5Yzc4ODJhZmQ4NzRiNzlmNjU3YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.gitignore | 1 - .../bigquery/storage/v1beta1/arrow.proto | 2 +- .../cloud/bigquery/storage/v1beta1/avro.proto | 2 +- .../storage/v1beta1/read_options.proto | 57 ++++++- .../bigquery/storage/v1beta1/storage.proto | 100 +++++++----- .../storage/v1beta1/table_reference.proto | 2 +- .../bigquery-storage/protos/protos.d.ts | 43 ++++-- handwritten/bigquery-storage/protos/protos.js | 143 ++++++++++++++---- .../bigquery-storage/protos/protos.json | 48 ++++-- .../big_query_storage.create_read_session.js | 1 + ...google.cloud.bigquery.storage.v1beta1.json | 6 +- .../src/v1beta1/big_query_storage_client.ts | 10 +- 12 files changed, 302 insertions(+), 113 deletions(-) diff --git a/handwritten/bigquery-storage/.gitignore b/handwritten/bigquery-storage/.gitignore index 013e7d2ee2f..d4f03a0df2e 100644 --- a/handwritten/bigquery-storage/.gitignore +++ b/handwritten/bigquery-storage/.gitignore @@ -1,7 +1,6 @@ **/*.log **/node_modules /.coverage -samples/.coverage /coverage /.nyc_output /docs/ diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto index 77dbfe3eb26..4894e2fea8d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ 
b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto index 222bdcc13f5..ad388d42d8c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 90fb7f3e492..2a4a9076169 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -21,19 +21,64 @@ option java_package = "com.google.cloud.bigquery.storage.v1beta1"; // Options dictating how we read a table. message TableReadOptions { - // Optional. Names of the fields in the table that should be read. If empty, - // all fields will be read. If the specified field is a nested field, all the - // sub-fields in the field will be selected. 
The output field order is - // unrelated to the order of fields in selected_fields. + // Optional. The names of the fields in the table to be returned. If no + // field names are specified, then all fields in the table are returned. + // + // Nested fields -- the child elements of a STRUCT field -- can be selected + // individually using their fully-qualified names, and will be returned as + // record fields containing only the selected nested fields. If a STRUCT + // field is specified in the selected fields list, all of the child elements + // will be returned. + // + // As an example, consider a table with the following schema: + // + // { + // "name": "struct_field", + // "type": "RECORD", + // "mode": "NULLABLE", + // "fields": [ + // { + // "name": "string_field1", + // "type": "STRING", + // . "mode": "NULLABLE" + // }, + // { + // "name": "string_field2", + // "type": "STRING", + // "mode": "NULLABLE" + // } + // ] + // } + // + // Specifying "struct_field" in the selected fields list will result in a + // read session schema with the following logical structure: + // + // struct_field { + // string_field1 + // string_field2 + // } + // + // Specifying "struct_field.string_field1" in the selected fields list will + // result in a read session schema with the following logical structure: + // + // struct_field { + // string_field1 + // } + // + // The order of the fields in the read session schema is derived from the + // table schema and does not correspond to the order in which the fields are + // specified in this list. repeated string selected_fields = 1; // Optional. SQL text filtering statement, similar to a WHERE clause in - // a query. Aggregates are not supported. + // a SQL query. Aggregates are not supported. 
// // Examples: "int_field > 5" // "date_field = CAST('2014-9-27' as DATE)" // "nullable_field is not NULL" // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" // "numeric_field BETWEEN 1.0 AND 5.0" + // + // Restricted to a maximum length for 1 MB. string row_restriction = 2; } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 2bf2a2c094d..3f9f218f12a 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -33,6 +33,11 @@ option java_package = "com.google.cloud.bigquery.storage.v1beta1"; // BigQuery storage API. // // The BigQuery storage API can be used to read data stored in BigQuery. +// +// The v1beta1 API is not yet officially deprecated, and will go through a full +// deprecation cycle (https://cloud.google.com/products#product-launch-stages) +// before the service is turned down. However, new code should use the v1 API +// going forward. service BigQueryStorage { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = @@ -49,7 +54,7 @@ service BigQueryStorage { // reached the end of each stream in the session, then all the data in the // table has been read. // - // Read sessions automatically expire 24 hours after they are created and do + // Read sessions automatically expire 6 hours after they are created and do // not require manual clean-up by the caller. 
rpc CreateReadSession(CreateReadSessionRequest) returns (ReadSession) { option (google.api.http) = { @@ -60,7 +65,8 @@ service BigQueryStorage { body: "*" } }; - option (google.api.method_signature) = "table_reference,parent,requested_streams"; + option (google.api.method_signature) = + "table_reference,parent,requested_streams"; } // Reads rows from the table in the format prescribed by the read session. @@ -82,7 +88,8 @@ service BigQueryStorage { // Creates additional streams for a ReadSession. This API can be used to // dynamically adjust the parallelism of a batch processing task upwards by // adding additional workers. - rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) returns (BatchCreateReadSessionStreamsResponse) { + rpc BatchCreateReadSessionStreams(BatchCreateReadSessionStreamsRequest) + returns (BatchCreateReadSessionStreamsResponse) { option (google.api.http) = { post: "/v1beta1/{session.name=projects/*/sessions/*}" body: "*" @@ -90,7 +97,7 @@ service BigQueryStorage { option (google.api.method_signature) = "session,requested_streams"; } - // Triggers the graceful termination of a single stream in a ReadSession. This + // Causes a single stream in a ReadSession to gracefully stop. This // API can be used to dynamically adjust the parallelism of a batch processing // task downwards without losing data. // @@ -125,7 +132,8 @@ service BigQueryStorage { // completion. // // This method is guaranteed to be idempotent. - rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) { + rpc SplitReadStream(SplitReadStreamRequest) + returns (SplitReadStreamResponse) { option (google.api.http) = { get: "/v1beta1/{original_stream.name=projects/*/streams/*}" }; @@ -193,6 +201,40 @@ message ReadSession { ShardingStrategy sharding_strategy = 9; } +// Data format for input or output data. +enum DataFormat { + // Data format is unspecified. 
+ DATA_FORMAT_UNSPECIFIED = 0; + + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. + AVRO = 1; + + // Arrow is a standard open source column-based message format. + // See https://arrow.apache.org/ for more details. + ARROW = 3; +} + +// Strategy for distributing data among multiple streams in a read session. +enum ShardingStrategy { + // Same as LIQUID. + SHARDING_STRATEGY_UNSPECIFIED = 0; + + // Assigns data to each stream based on the client's read rate. The faster the + // client reads from a stream, the more data is assigned to the stream. In + // this strategy, it's possible to read all data from a single stream even if + // there are other streams present. + LIQUID = 1; + + // Assigns data to each stream such that roughly the same number of rows can + // be read from each stream. Because the server-side unit for assigning data + // is collections of rows, the API does not guarantee that each stream will + // return the same number or rows. Additionally, the limits are enforced based + // on the number of pre-filtering rows, so some filters can lead to lopsided + // assignments. + BALANCED = 2; +} + // Creates a new read session, which may include additional options such as // requested parallelism, projection filters and constraints. message CreateReadSessionRequest { @@ -225,6 +267,7 @@ message CreateReadSessionRequest { TableReadOptions read_options = 4; // Data output format. Currently default to Avro. + // DATA_FORMAT_UNSPECIFIED not supported. DataFormat format = 5; // The strategy to use for distributing data among multiple streams. Currently @@ -232,38 +275,6 @@ message CreateReadSessionRequest { ShardingStrategy sharding_strategy = 7; } -// Data format for input or output data. -enum DataFormat { - // Data format is unspecified. - DATA_FORMAT_UNSPECIFIED = 0; - - // Avro is a standard open source row based file format. - // See https://avro.apache.org/ for more details. 
- AVRO = 1; - - ARROW = 3; -} - -// Strategy for distributing data among multiple streams in a read session. -enum ShardingStrategy { - // Same as LIQUID. - SHARDING_STRATEGY_UNSPECIFIED = 0; - - // Assigns data to each stream based on the client's read rate. The faster the - // client reads from a stream, the more data is assigned to the stream. In - // this strategy, it's possible to read all data from a single stream even if - // there are other streams present. - LIQUID = 1; - - // Assigns data to each stream such that roughly the same number of rows can - // be read from each stream. Because the server-side unit for assigning data - // is collections of rows, the API does not guarantee that each stream will - // return the same number or rows. Additionally, the limits are enforced based - // on the number of pre-filtering rows, so some filters can lead to lopsided - // assignments. - BALANCED = 2; -} - // Requesting row data via `ReadRows` must provide Stream position information. message ReadRowsRequest { // Required. Identifier of the position in the stream to start reading from. @@ -349,6 +360,19 @@ message ReadRowsResponse { // Throttling status. If unset, the latest response still describes // the current throttling status. ThrottleStatus throttle_status = 5; + + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. This schema is equivalent to the one returned by + // CreateSession. This field is only populated in the first ReadRowsResponse + // RPC. + oneof schema { + // Output only. Avro schema. + AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Arrow schema. 
+ ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + } } // Information needed to request additional streams for an established read diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index 9f643cd65e6..3fe3dfe27d2 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 0056863719b..569187a7dfb 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -5334,6 +5334,20 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** DataFormat enum. */ + enum DataFormat { + DATA_FORMAT_UNSPECIFIED = 0, + AVRO = 1, + ARROW = 3 + } + + /** ShardingStrategy enum. */ + enum ShardingStrategy { + SHARDING_STRATEGY_UNSPECIFIED = 0, + LIQUID = 1, + BALANCED = 2 + } + /** Properties of a CreateReadSessionRequest. */ interface ICreateReadSessionRequest { @@ -5467,20 +5481,6 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } - /** DataFormat enum. */ - enum DataFormat { - DATA_FORMAT_UNSPECIFIED = 0, - AVRO = 1, - ARROW = 3 - } - - /** ShardingStrategy enum. */ - enum ShardingStrategy { - SHARDING_STRATEGY_UNSPECIFIED = 0, - LIQUID = 1, - BALANCED = 2 - } - /** Properties of a ReadRowsRequest. 
*/ interface IReadRowsRequest { @@ -5910,6 +5910,12 @@ export namespace google { /** ReadRowsResponse throttleStatus */ throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + + /** ReadRowsResponse avroSchema */ + avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema */ + arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); } /** Represents a ReadRowsResponse. */ @@ -5936,9 +5942,18 @@ export namespace google { /** ReadRowsResponse throttleStatus. */ public throttleStatus?: (google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null); + /** ReadRowsResponse avroSchema. */ + public avroSchema?: (google.cloud.bigquery.storage.v1beta1.IAvroSchema|null); + + /** ReadRowsResponse arrowSchema. */ + public arrowSchema?: (google.cloud.bigquery.storage.v1beta1.IArrowSchema|null); + /** ReadRowsResponse rows. */ public rows?: ("avroRows"|"arrowRecordBatch"); + /** ReadRowsResponse schema. */ + public schema?: ("avroSchema"|"arrowSchema"); + /** * Creates a new ReadRowsResponse instance using the specified properties. * @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 71d111aeb5d..64d5c79ed7a 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -12907,6 +12907,38 @@ return ReadSession; })(); + /** + * DataFormat enum. 
+ * @name google.cloud.bigquery.storage.v1beta1.DataFormat + * @enum {number} + * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value + * @property {number} AVRO=1 AVRO value + * @property {number} ARROW=3 ARROW value + */ + v1beta1.DataFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; + values[valuesById[1] = "AVRO"] = 1; + values[valuesById[3] = "ARROW"] = 3; + return values; + })(); + + /** + * ShardingStrategy enum. + * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy + * @enum {number} + * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value + * @property {number} LIQUID=1 LIQUID value + * @property {number} BALANCED=2 BALANCED value + */ + v1beta1.ShardingStrategy = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; + values[valuesById[1] = "LIQUID"] = 1; + values[valuesById[2] = "BALANCED"] = 2; + return values; + })(); + v1beta1.CreateReadSessionRequest = (function() { /** @@ -13312,38 +13344,6 @@ return CreateReadSessionRequest; })(); - /** - * DataFormat enum. - * @name google.cloud.bigquery.storage.v1beta1.DataFormat - * @enum {number} - * @property {number} DATA_FORMAT_UNSPECIFIED=0 DATA_FORMAT_UNSPECIFIED value - * @property {number} AVRO=1 AVRO value - * @property {number} ARROW=3 ARROW value - */ - v1beta1.DataFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DATA_FORMAT_UNSPECIFIED"] = 0; - values[valuesById[1] = "AVRO"] = 1; - values[valuesById[3] = "ARROW"] = 3; - return values; - })(); - - /** - * ShardingStrategy enum. 
- * @name google.cloud.bigquery.storage.v1beta1.ShardingStrategy - * @enum {number} - * @property {number} SHARDING_STRATEGY_UNSPECIFIED=0 SHARDING_STRATEGY_UNSPECIFIED value - * @property {number} LIQUID=1 LIQUID value - * @property {number} BALANCED=2 BALANCED value - */ - v1beta1.ShardingStrategy = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "SHARDING_STRATEGY_UNSPECIFIED"] = 0; - values[valuesById[1] = "LIQUID"] = 1; - values[valuesById[2] = "BALANCED"] = 2; - return values; - })(); - v1beta1.ReadRowsRequest = (function() { /** @@ -14285,6 +14285,8 @@ * @property {number|Long|null} [rowCount] ReadRowsResponse rowCount * @property {google.cloud.bigquery.storage.v1beta1.IStreamStatus|null} [status] ReadRowsResponse status * @property {google.cloud.bigquery.storage.v1beta1.IThrottleStatus|null} [throttleStatus] ReadRowsResponse throttleStatus + * @property {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema + * @property {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema */ /** @@ -14342,6 +14344,22 @@ */ ReadRowsResponse.prototype.throttleStatus = null; + /** + * ReadRowsResponse avroSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IAvroSchema|null|undefined} avroSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.avroSchema = null; + + /** + * ReadRowsResponse arrowSchema. + * @member {google.cloud.bigquery.storage.v1beta1.IArrowSchema|null|undefined} arrowSchema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.arrowSchema = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -14356,6 +14374,17 @@ set: $util.oneOfSetter($oneOfFields) }); + /** + * ReadRowsResponse schema. 
+ * @member {"avroSchema"|"arrowSchema"|undefined} schema + * @memberof google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "schema", { + get: $util.oneOfGetter($oneOfFields = ["avroSchema", "arrowSchema"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new ReadRowsResponse instance using the specified properties. * @function create @@ -14390,6 +14419,10 @@ $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.encode(message.throttleStatus, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.rowCount != null && Object.hasOwnProperty.call(message, "rowCount")) writer.uint32(/* id 6, wireType 0 =*/48).int64(message.rowCount); + if (message.avroSchema != null && Object.hasOwnProperty.call(message, "avroSchema")) + $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) + $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); return writer; }; @@ -14444,6 +14477,14 @@ message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.decode(reader, reader.uint32()); break; } + case 7: { + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.decode(reader, reader.uint32()); + break; + } + case 8: { + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -14511,6 +14552,24 @@ if (error) return "throttleStatus." 
+ error; } + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.verify(message.avroSchema); + if (error) + return "avroSchema." + error; + } + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + if (properties.schema === 1) + return "schema: multiple values"; + properties.schema = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.verify(message.arrowSchema); + if (error) + return "arrowSchema." + error; + } + } return null; }; @@ -14555,6 +14614,16 @@ throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.throttleStatus: object expected"); message.throttleStatus = $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus.fromObject(object.throttleStatus); } + if (object.avroSchema != null) { + if (typeof object.avroSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.avroSchema: object expected"); + message.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.fromObject(object.avroSchema); + } + if (object.arrowSchema != null) { + if (typeof object.arrowSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse.arrowSchema: object expected"); + message.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.fromObject(object.arrowSchema); + } return message; }; @@ -14599,6 +14668,16 @@ object.rowCount = options.longs === String ? String(message.rowCount) : message.rowCount; else object.rowCount = options.longs === String ? $util.Long.prototype.toString.call(message.rowCount) : options.longs === Number ? 
new $util.LongBits(message.rowCount.low >>> 0, message.rowCount.high >>> 0).toNumber() : message.rowCount; + if (message.avroSchema != null && message.hasOwnProperty("avroSchema")) { + object.avroSchema = $root.google.cloud.bigquery.storage.v1beta1.AvroSchema.toObject(message.avroSchema, options); + if (options.oneofs) + object.schema = "avroSchema"; + } + if (message.arrowSchema != null && message.hasOwnProperty("arrowSchema")) { + object.arrowSchema = $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema.toObject(message.arrowSchema, options); + if (options.oneofs) + object.schema = "arrowSchema"; + } return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 753ce42499c..54974215987 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1333,6 +1333,20 @@ } } }, + "DataFormat": { + "values": { + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, + "ARROW": 3 + } + }, + "ShardingStrategy": { + "values": { + "SHARDING_STRATEGY_UNSPECIFIED": 0, + "LIQUID": 1, + "BALANCED": 2 + } + }, "CreateReadSessionRequest": { "fields": { "tableReference": { @@ -1372,20 +1386,6 @@ } } }, - "DataFormat": { - "values": { - "DATA_FORMAT_UNSPECIFIED": 0, - "AVRO": 1, - "ARROW": 3 - } - }, - "ShardingStrategy": { - "values": { - "SHARDING_STRATEGY_UNSPECIFIED": 0, - "LIQUID": 1, - "BALANCED": 2 - } - }, "ReadRowsRequest": { "fields": { "readPosition": { @@ -1444,6 +1444,12 @@ "avroRows", "arrowRecordBatch" ] + }, + "schema": { + "oneof": [ + "avroSchema", + "arrowSchema" + ] } }, "fields": { @@ -1466,6 +1472,20 @@ "throttleStatus": { "type": "ThrottleStatus", "id": 5 + }, + "avroSchema": { + "type": "AvroSchema", + "id": 7, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "arrowSchema": { + "type": "ArrowSchema", + "id": 8, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } } }, diff --git 
a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index b7ee90090d5..b1dafbef295 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -57,6 +57,7 @@ function main(tableReference, parent) { // const readOptions = {} /** * Data output format. Currently default to Avro. + * DATA_FORMAT_UNSPECIFIED not supported. */ // const format = {} /** diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 5eccd319c53..8ccb71e2d61 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -15,14 +15,14 @@ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async", "title": "BigQueryStorage createReadSession Sample", "origin": "API_DEFINITION", - "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. 
Read sessions automatically expire 24 hours after they are created and do not require manual clean-up by the caller.", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", "canonical": true, "file": "big_query_storage.create_read_session.js", "language": "JAVASCRIPT", "segments": [ { "start": 25, - "end": 86, + "end": 87, "type": "FULL" } ], @@ -163,7 +163,7 @@ "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async", "title": "BigQueryStorage finalizeStream Sample", "origin": "API_DEFINITION", - "description": " Triggers the graceful termination of a single stream in a ReadSession. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", + "description": " Causes a single stream in a ReadSession to gracefully stop. 
This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", "canonical": true, "file": "big_query_storage.finalize_stream.js", "language": "JAVASCRIPT", diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 7fa7c8e90fe..51869099b26 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -39,6 +39,11 @@ const version = require('../../../package.json').version; * BigQuery storage API. * * The BigQuery storage API can be used to read data stored in BigQuery. + * + * The v1beta1 API is not yet officially deprecated, and will go through a full + * deprecation cycle (https://cloud.google.com/products#product-launch-stages) + * before the service is turned down. However, new code should use the v1 API + * going forward. * @class * @memberof v1beta1 */ @@ -360,7 +365,7 @@ export class BigQueryStorageClient { * reached the end of each stream in the session, then all the data in the * table has been read. * - * Read sessions automatically expire 24 hours after they are created and do + * Read sessions automatically expire 6 hours after they are created and do * not require manual clean-up by the caller. 
* * @param {Object} request @@ -385,6 +390,7 @@ export class BigQueryStorageClient { * Read options for this session (e.g. column selection, filters). * @param {google.cloud.bigquery.storage.v1beta1.DataFormat} request.format * Data output format. Currently default to Avro. + * DATA_FORMAT_UNSPECIFIED not supported. * @param {google.cloud.bigquery.storage.v1beta1.ShardingStrategy} request.shardingStrategy * The strategy to use for distributing data among multiple streams. Currently * defaults to liquid sharding. @@ -588,7 +594,7 @@ export class BigQueryStorageClient { ); } /** - * Triggers the graceful termination of a single stream in a ReadSession. This + * Causes a single stream in a ReadSession to gracefully stop. This * API can be used to dynamically adjust the parallelism of a batch processing * task downwards without losing data. * From e3e55525782cdc7ec3a61daf86ec112dad42c2b2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 26 May 2023 09:58:46 -0400 Subject: [PATCH 218/333] docs: update docs-devsite.sh to use latest node-js-rad version (#339) Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> Source-Link: https://github.com/googleapis/synthtool/commit/b1ced7db5adee08cfa91d6b138679fceff32c004 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:0527a86c10b67742c409dc726ba9a31ec4e69b0006e3d7a49b0e6686c59cdaa9 Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 3 ++- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 0b836e11907..21ad18bd722 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:e6d785d6de3cab027f6213d95ccedab4cab3811b0d3172b78db2216faa182e32 + digest: sha256:0527a86c10b67742c409dc726ba9a31ec4e69b0006e3d7a49b0e6686c59cdaa9 +# created: 2023-05-24T20:32:43.844586914Z diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 2198e67fe92..3596c1e4cb1 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -25,5 +25,6 @@ if [[ -z "$CREDENTIALS" ]]; then fi npm install -npm install --no-save @google-cloud/cloud-rad@^0.2.5 -npx @google-cloud/cloud-rad \ No newline at end of file +npm install --no-save @google-cloud/cloud-rad@^0.3.7 +# publish docs to devsite +npx @google-cloud/cloud-rad . cloud-rad From ad3da375a788fdb58acb389b176937f8baa61ef7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Jun 2023 13:27:10 -0700 Subject: [PATCH 219/333] feat: add table sampling to ReadAPI v1 (#338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add table sampling to ReadAPI v1 feat: add storage error codes for KMS PiperOrigin-RevId: 534092654 Source-Link: https://github.com/googleapis/googleapis/commit/adcd87eb8cc501ba16f4df3051869c9392e9041f Source-Link: https://github.com/googleapis/googleapis-gen/commit/53f03dcd2ae0d86832d87a530aa538b9daebf2b0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTNmMDNkY2QyYWUwZDg2ODMyZDg3YTUzMGFhNTM4YjlkYWViZjJiMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: meredithslota --- .../cloud/bigquery/storage/v1/arrow.proto | 2 +- .../cloud/bigquery/storage/v1/avro.proto | 2 +- 
.../cloud/bigquery/storage/v1/protobuf.proto | 2 +- .../cloud/bigquery/storage/v1/storage.proto | 18 ++++- .../cloud/bigquery/storage/v1/stream.proto | 11 ++- .../cloud/bigquery/storage/v1/table.proto | 2 +- .../bigquery-storage/protos/protos.d.ts | 16 +++- handwritten/bigquery-storage/protos/protos.js | 73 +++++++++++++++++++ .../bigquery-storage/protos/protos.json | 20 ++++- 9 files changed, 138 insertions(+), 8 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 4ac268c8b02..05036d21db5 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index 52441e9fcf9..588406aba31 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto index 05ac778f03d..e12f4d8db07 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index e84a58df778..d28c36f43f4 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -669,6 +669,22 @@ message StorageError { // Offset out of range. OFFSET_OUT_OF_RANGE = 9; + + // Customer-managed encryption key (CMEK) not provided for CMEK-enabled + // data. + CMEK_NOT_PROVIDED = 10; + + // Customer-managed encryption key (CMEK) was incorrectly provided. + INVALID_CMEK_PROVIDED = 11; + + // There is an encryption error while using customer-managed encryption key. + CMEK_ENCRYPTION_ERROR = 12; + + // Key Management Service (KMS) service returned an error. + KMS_SERVICE_ERROR = 13; + + // Permission denied while using customer-managed encryption key. + KMS_PERMISSION_DENIED = 14; } // BigQuery Storage specific error code. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 85f6dd82575..e72932e187d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -129,6 +129,15 @@ message ReadSession { AvroSerializationOptions avro_serialization_options = 4 [(google.api.field_behavior) = OPTIONAL]; } + + // Optional. Specifies a table sampling percentage. Specifically, the query + // planner will use TABLESAMPLE SYSTEM (sample_percentage PERCENT). This + // samples at the file-level. It will randomly choose for each file whether + // to include that file in the sample returned. Note, that if the table only + // has one file, then TABLESAMPLE SYSTEM will select that file and return + // all returnable rows contained within. + optional double sample_percentage = 5 + [(google.api.field_behavior) = OPTIONAL]; } // Output only. Unique identifier for the session, in the form diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index c9b62d7932e..47629c510e6 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -1,4 +1,4 @@ -// Copyright 2022 Google LLC +// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 569187a7dfb..2e0c02d9802 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -3316,7 +3316,12 @@ export namespace google { STREAM_FINALIZED = 6, SCHEMA_MISMATCH_EXTRA_FIELDS = 7, OFFSET_ALREADY_EXISTS = 8, - OFFSET_OUT_OF_RANGE = 9 + OFFSET_OUT_OF_RANGE = 9, + CMEK_NOT_PROVIDED = 10, + INVALID_CMEK_PROVIDED = 11, + CMEK_ENCRYPTION_ERROR = 12, + KMS_SERVICE_ERROR = 13, + KMS_PERMISSION_DENIED = 14 } } @@ -3724,6 +3729,9 @@ export namespace google { /** TableReadOptions avroSerializationOptions */ avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); + + /** TableReadOptions samplePercentage */ + samplePercentage?: (number|null); } /** Represents a TableReadOptions. */ @@ -3747,9 +3755,15 @@ export namespace google { /** TableReadOptions avroSerializationOptions. */ public avroSerializationOptions?: (google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null); + /** TableReadOptions samplePercentage. */ + public samplePercentage?: (number|null); + /** TableReadOptions outputFormatSerializationOptions. */ public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); + /** TableReadOptions _samplePercentage. */ + public _samplePercentage?: "samplePercentage"; + /** * Creates a new TableReadOptions instance using the specified properties. 
* @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 64d5c79ed7a..c2ce9c76aa5 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -7521,6 +7521,11 @@ case 7: case 8: case 9: + case 10: + case 11: + case 12: + case 13: + case 14: break; } if (message.entity != null && message.hasOwnProperty("entity")) @@ -7591,6 +7596,26 @@ case 9: message.code = 9; break; + case "CMEK_NOT_PROVIDED": + case 10: + message.code = 10; + break; + case "INVALID_CMEK_PROVIDED": + case 11: + message.code = 11; + break; + case "CMEK_ENCRYPTION_ERROR": + case 12: + message.code = 12; + break; + case "KMS_SERVICE_ERROR": + case 13: + message.code = 13; + break; + case "KMS_PERMISSION_DENIED": + case 14: + message.code = 14; + break; } if (object.entity != null) message.entity = String(object.entity); @@ -7666,6 +7691,11 @@ * @property {number} SCHEMA_MISMATCH_EXTRA_FIELDS=7 SCHEMA_MISMATCH_EXTRA_FIELDS value * @property {number} OFFSET_ALREADY_EXISTS=8 OFFSET_ALREADY_EXISTS value * @property {number} OFFSET_OUT_OF_RANGE=9 OFFSET_OUT_OF_RANGE value + * @property {number} CMEK_NOT_PROVIDED=10 CMEK_NOT_PROVIDED value + * @property {number} INVALID_CMEK_PROVIDED=11 INVALID_CMEK_PROVIDED value + * @property {number} CMEK_ENCRYPTION_ERROR=12 CMEK_ENCRYPTION_ERROR value + * @property {number} KMS_SERVICE_ERROR=13 KMS_SERVICE_ERROR value + * @property {number} KMS_PERMISSION_DENIED=14 KMS_PERMISSION_DENIED value */ StorageError.StorageErrorCode = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -7679,6 +7709,11 @@ values[valuesById[7] = "SCHEMA_MISMATCH_EXTRA_FIELDS"] = 7; values[valuesById[8] = "OFFSET_ALREADY_EXISTS"] = 8; values[valuesById[9] = "OFFSET_OUT_OF_RANGE"] = 9; + values[valuesById[10] = "CMEK_NOT_PROVIDED"] = 10; + values[valuesById[11] = "INVALID_CMEK_PROVIDED"] = 11; + values[valuesById[12] = 
"CMEK_ENCRYPTION_ERROR"] = 12; + values[valuesById[13] = "KMS_SERVICE_ERROR"] = 13; + values[valuesById[14] = "KMS_PERMISSION_DENIED"] = 14; return values; })(); @@ -8796,6 +8831,7 @@ * @property {string|null} [rowRestriction] TableReadOptions rowRestriction * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions * @property {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null} [avroSerializationOptions] TableReadOptions avroSerializationOptions + * @property {number|null} [samplePercentage] TableReadOptions samplePercentage */ /** @@ -8846,6 +8882,14 @@ */ TableReadOptions.prototype.avroSerializationOptions = null; + /** + * TableReadOptions samplePercentage. + * @member {number|null|undefined} samplePercentage + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.samplePercentage = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -8860,6 +8904,17 @@ set: $util.oneOfSetter($oneOfFields) }); + /** + * TableReadOptions _samplePercentage. + * @member {"samplePercentage"|undefined} _samplePercentage + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + Object.defineProperty(TableReadOptions.prototype, "_samplePercentage", { + get: $util.oneOfGetter($oneOfFields = ["samplePercentage"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new TableReadOptions instance using the specified properties. 
* @function create @@ -8893,6 +8948,8 @@ $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.encode(message.arrowSerializationOptions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if (message.avroSerializationOptions != null && Object.hasOwnProperty.call(message, "avroSerializationOptions")) $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.encode(message.avroSerializationOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.samplePercentage != null && Object.hasOwnProperty.call(message, "samplePercentage")) + writer.uint32(/* id 5, wireType 1 =*/41).double(message.samplePercentage); return writer; }; @@ -8945,6 +9002,10 @@ message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.decode(reader, reader.uint32()); break; } + case 5: { + message.samplePercentage = reader.double(); + break; + } default: reader.skipType(tag & 7); break; @@ -9009,6 +9070,11 @@ return "avroSerializationOptions." + error; } } + if (message.samplePercentage != null && message.hasOwnProperty("samplePercentage")) { + properties._samplePercentage = 1; + if (typeof message.samplePercentage !== "number") + return "samplePercentage: number expected"; + } return null; }; @@ -9043,6 +9109,8 @@ throw TypeError(".google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.avroSerializationOptions: object expected"); message.avroSerializationOptions = $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.fromObject(object.avroSerializationOptions); } + if (object.samplePercentage != null) + message.samplePercentage = Number(object.samplePercentage); return message; }; @@ -9080,6 +9148,11 @@ if (options.oneofs) object.outputFormatSerializationOptions = "avroSerializationOptions"; } + if (message.samplePercentage != null && message.hasOwnProperty("samplePercentage")) { + object.samplePercentage = options.json && !isFinite(message.samplePercentage) ? 
String(message.samplePercentage) : message.samplePercentage; + if (options.oneofs) + object._samplePercentage = "samplePercentage"; + } return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 54974215987..f37029dbf6e 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -702,7 +702,12 @@ "STREAM_FINALIZED": 6, "SCHEMA_MISMATCH_EXTRA_FIELDS": 7, "OFFSET_ALREADY_EXISTS": 8, - "OFFSET_OUT_OF_RANGE": 9 + "OFFSET_OUT_OF_RANGE": 9, + "CMEK_NOT_PROVIDED": 10, + "INVALID_CMEK_PROVIDED": 11, + "CMEK_ENCRYPTION_ERROR": 12, + "KMS_SERVICE_ERROR": 13, + "KMS_PERMISSION_DENIED": 14 } } } @@ -855,6 +860,11 @@ "arrowSerializationOptions", "avroSerializationOptions" ] + }, + "_samplePercentage": { + "oneof": [ + "samplePercentage" + ] } }, "fields": { @@ -880,6 +890,14 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "samplePercentage": { + "type": "double", + "id": 5, + "options": { + "(google.api.field_behavior)": "OPTIONAL", + "proto3_optional": true + } } } } From ed821322d1511b5d8cae5c8cf6a6c8712f30360a Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 5 Jun 2023 16:02:45 -0400 Subject: [PATCH 220/333] test: fix typescript version in pack-n-play system test (#345) --- .../bigquery-storage/system-test/install.ts | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index f61fe236476..3e4fc28a671 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -30,9 +30,15 @@ describe('📦 pack-n-play test', () => { ts: readFileSync( './system-test/fixtures/sample/src/index.ts' ).toString(), + dependencies: ['typescript@4.8.3'], }, }; - await packNTest(options); + try { + await packNTest(options); + } catch (err) { + 
console.error('TS install failed:\n', err); + throw err; + } }); it('JavaScript code', async function () { @@ -44,8 +50,14 @@ describe('📦 pack-n-play test', () => { ts: readFileSync( './system-test/fixtures/sample/src/index.js' ).toString(), + dependencies: ['typescript@4.8.3'], }, }; - await packNTest(options); + try { + await packNTest(options); + } catch (err) { + console.error('JS install failed:\n', err); + throw err; + } }); }); From c6e7896c6754a5f8be7b86f58b8853213c032595 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 5 Jun 2023 16:47:55 -0400 Subject: [PATCH 221/333] chore(main): release 3.4.0 (#324) --- handwritten/bigquery-storage/CHANGELOG.md | 14 ++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ..._metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...data.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 5ee19526a33..59790bd297c 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [3.4.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.3.0...v3.4.0) (2023-06-05) + + +### Features + +* Adapt package to convert TableSchema to ProtoDescriptor ([#326](https://github.com/googleapis/nodejs-bigquery-storage/issues/326)) ([2d189e9](https://github.com/googleapis/nodejs-bigquery-storage/commit/2d189e9bf3fc0e4721110b148eede9f3ddbed29e)) +* Add default_value_expression to TableFieldSchema ([#323](https://github.com/googleapis/nodejs-bigquery-storage/issues/323)) ([b2aa964](https://github.com/googleapis/nodejs-bigquery-storage/commit/b2aa964153b048b550454f835f5aa6dd825a5362)) +* Add table sampling to ReadAPI v1 ([#338](https://github.com/googleapis/nodejs-bigquery-storage/issues/338)) 
([18bb7b0](https://github.com/googleapis/nodejs-bigquery-storage/commit/18bb7b07bd0eb255d49aecd60f45d88b005d8610)) + + +### Bug Fixes + +* Re-introduce routing headers for Write API ([#340](https://github.com/googleapis/nodejs-bigquery-storage/issues/340)) ([db6eca6](https://github.com/googleapis/nodejs-bigquery-storage/commit/db6eca647e704d0c0169f394e80986b1db127d66)) + ## [3.3.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.2.1...v3.3.0) (2023-01-25) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 9b3ca7f71ed..7ed1ff09127 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.3.0", + "version": "3.4.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 2fe9c974354..9b845d8bb33 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.3.0", + "version": "3.4.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 8ccb71e2d61..58a87a47a3b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.3.0", + "version": "3.4.0", "language": "TYPESCRIPT", "apis": [ { From 448c3c8da80284ce0d105b625992f7eaac009dcc Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 20 Jun 2023 22:18:19 +0200 Subject: [PATCH 222/333] chore(deps): update dependency c8 to v8 (#346) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7ed1ff09127..9e96de96e4a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -33,7 +33,7 @@ "@types/mocha": "^9.0.0", "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", - "c8": "^7.1.0", + "c8": "^8.0.0", "gts": "^3.1.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^2.0.0", From 5f3c30ae35e3776be41413acd345a194e0eb6c2c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 27 Jul 2023 14:34:52 -0400 Subject: [PATCH 223/333] chore: upgrade to Node 14 (#350) Source-Link: https://github.com/googleapis/synthtool/commit/2d2d5e5c4e0eb30b0a7c2c95576e4e89c8443b35 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:bfa6fdba19aa7d105167d01fb51f5fd8285e8cd9fca264e43aff849e9e7fa36c Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../bigquery-storage/.kokoro/common.cfg | 2 +- .../.kokoro/continuous/node14/common.cfg | 24 +++++++++++++++++++ .../.kokoro/continuous/node14/lint.cfg | 4 ++++ .../continuous/node14/samples-test.cfg | 12 ++++++++++ .../.kokoro/continuous/node14/system-test.cfg | 12 ++++++++++ .../.kokoro/continuous/node14/test.cfg | 0 .../.kokoro/presubmit/node14/common.cfg | 24 +++++++++++++++++++ .../.kokoro/presubmit/node14/samples-test.cfg | 12 ++++++++++ 
.../.kokoro/presubmit/node14/system-test.cfg | 12 ++++++++++ .../.kokoro/presubmit/node14/test.cfg | 0 .../bigquery-storage/.kokoro/release/docs.cfg | 2 +- .../.kokoro/release/publish.cfg | 2 +- .../bigquery-storage/.kokoro/samples-test.sh | 2 +- .../bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- 16 files changed, 108 insertions(+), 8 deletions(-) create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node14/test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg create mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node14/test.cfg diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 21ad18bd722..9e959ba1520 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:0527a86c10b67742c409dc726ba9a31ec4e69b0006e3d7a49b0e6686c59cdaa9 -# created: 2023-05-24T20:32:43.844586914Z + digest: sha256:bfa6fdba19aa7d105167d01fb51f5fd8285e8cd9fca264e43aff849e9e7fa36c +# created: 2023-07-06T17:45:12.014855061Z diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg index 7fc0cdeac69..2c6bf4338e5 100644 --- a/handwritten/bigquery-storage/.kokoro/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg new file mode 100644 index 00000000000..2c6bf4338e5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg new file mode 100644 index 00000000000..0a5d546b96b --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg @@ -0,0 +1,4 @@ +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg new file mode 100644 index 00000000000..9ca77598360 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg @@ -0,0 +1,12 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg new file mode 100644 index 00000000000..42454cf416c --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg @@ -0,0 +1,12 @@ +# Download resources for system tests (service account key, etc.) 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node14/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg new file mode 100644 index 00000000000..2c6bf4338e5 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg @@ -0,0 +1,24 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/test.sh" +} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg new file mode 100644 index 00000000000..9ca77598360 --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg @@ -0,0 +1,12 @@ +# Download resources for system tests (service account key, etc.) 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg new file mode 100644 index 00000000000..42454cf416c --- /dev/null +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg @@ -0,0 +1,12 @@ +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "long-door-651-kokoro-system-test-service-account" +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node14/test.cfg new file mode 100644 index 00000000000..e69de29bb2d diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg index 17861c90782..03ff6056a52 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" } # Download trampoline resources. 
diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 2a5fbd3f7c9..93a76e89119 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -30,7 +30,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 806c0082236..8c5d108cb58 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -56,7 +56,7 @@ fi # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=12 +COVERAGE_NODE=14 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 0201e9dfd71..0b3043d268c 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -49,7 +49,7 @@ npm run system-test # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=12 +COVERAGE_NODE=14 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index a5c7ac04cd3..862d478d324 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -39,7 +39,7 @@ npm test # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=12 +COVERAGE_NODE=14 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then From c4a7c93be8ede6acb857ba160af13cd92b1fa763 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 27 Jul 2023 14:59:28 -0400 Subject: [PATCH 224/333] build: add extra test for Node 20, update windows tests (#351) * build: add extra test for Node 20, update windows tests Source-Link: https://github.com/googleapis/synthtool/commit/38f5d4bfd5d51116a3cf7f260b8fe5d8a0046cfa Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:ef104a520c849ffde60495342ecf099dfb6256eab0fbd173228f447bc73d1aa9 Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/.kokoro/test.bat | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 9e959ba1520..c7d7a2025af 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:bfa6fdba19aa7d105167d01fb51f5fd8285e8cd9fca264e43aff849e9e7fa36c -# created: 2023-07-06T17:45:12.014855061Z + digest: sha256:ef104a520c849ffde60495342ecf099dfb6256eab0fbd173228f447bc73d1aa9 +# created: 2023-07-10T21:36:52.433664553Z \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/test.bat b/handwritten/bigquery-storage/.kokoro/test.bat index ae59e59be3e..0bb12405231 100644 --- a/handwritten/bigquery-storage/.kokoro/test.bat +++ b/handwritten/bigquery-storage/.kokoro/test.bat @@ -21,7 +21,7 @@ cd .. @rem we upgrade Node.js in the image: SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm -call nvm use v12.14.1 +call nvm use v14.17.3 call which node call npm install || goto :error From 0c796adca17acbbed5c552125508503db9f12f22 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 1 Aug 2023 14:40:01 -0400 Subject: [PATCH 225/333] chore!: migrate to node 14 (#354) Co-authored-by: Owl Bot --- .../.github/sync-repo-settings.yaml | 2 +- .../.kokoro/continuous/node12/common.cfg | 24 ------------------- .../.kokoro/continuous/node12/lint.cfg | 4 ---- .../continuous/node12/samples-test.cfg | 12 ---------- .../.kokoro/continuous/node12/system-test.cfg | 12 ---------- .../.kokoro/continuous/node12/test.cfg | 0 .../.kokoro/presubmit/node12/common.cfg | 24 ------------------- .../.kokoro/presubmit/node12/samples-test.cfg | 12 ---------- .../.kokoro/presubmit/node12/system-test.cfg | 12 ---------- .../.kokoro/presubmit/node12/test.cfg | 0 handwritten/bigquery-storage/package.json | 11 +++++---- .../src/v1/big_query_read_client.ts | 8 +++---- .../src/v1/big_query_write_client.ts | 20 ++++++++-------- .../src/v1beta1/big_query_storage_client.ts | 16 ++++++------- 14 files changed, 29 insertions(+), 128 deletions(-) delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg delete mode 100644 
handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg delete mode 100644 handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml index 4a30a08e54c..1350faeff2a 100644 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -9,9 +9,9 @@ branchProtectionRules: - "ci/kokoro: System test" - docs - lint - - test (12) - test (14) - test (16) + - test (18) - cla/google - windows - OwlBot Post Processor diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg deleted file mode 100644 index 7fc0cdeac69..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg deleted file mode 100644 index 0a5d546b96b..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/lint.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg deleted file mode 100644 index 9ca77598360..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/samples-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg deleted file mode 100644 index 42454cf416c..00000000000 --- a/handwritten/bigquery-storage/.kokoro/continuous/node12/system-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node12/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg deleted file mode 100644 index 7fc0cdeac69..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:12-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" -} diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg deleted file mode 100644 index 9ca77598360..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/samples-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg deleted file mode 100644 index 42454cf416c..00000000000 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node12/system-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node12/test.cfg deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 9e96de96e4a..63e3ff3f242 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,28 +27,29 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^3.5.2" + "google-gax": "^4.0.3" }, "devDependencies": { "@types/mocha": "^9.0.0", "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", "c8": "^8.0.0", - "gts": "^3.1.0", + "gapic-tools": "^0.1.8", + "gts": "^5.0.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^2.0.0", "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.0", + "linkinator": "^5.0.0", "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", "sinon": "^15.0.0", 
"ts-loader": "^9.0.0", - "typescript": "^4.8.3", + "typescript": "^5.1.6", "webpack": "^5.0.0", "webpack-cli": "^5.0.0" }, "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" } } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 92513560075..55dcdc0ebd7 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -420,7 +420,7 @@ export class BigQueryReadClient { | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest | undefined ), - {} | undefined + {} | undefined, ] >; createReadSession( @@ -469,7 +469,7 @@ export class BigQueryReadClient { | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -536,7 +536,7 @@ export class BigQueryReadClient { | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; splitReadStream( @@ -585,7 +585,7 @@ export class BigQueryReadClient { | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 225fe7a107e..6420025f874 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -394,7 +394,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; createWriteStream( @@ -443,7 +443,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { 
request = request || {}; @@ -495,7 +495,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; getWriteStream( @@ -544,7 +544,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -594,7 +594,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; finalizeWriteStream( @@ -643,7 +643,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -699,7 +699,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest | undefined ), - {} | undefined + {} | undefined, ] >; batchCommitWriteStreams( @@ -748,7 +748,7 @@ export class BigQueryWriteClient { | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -807,7 +807,7 @@ export class BigQueryWriteClient { [ protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, - {} | undefined + {} | undefined, ] >; flushRows( @@ -853,7 +853,7 @@ export class BigQueryWriteClient { [ protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, - {} | undefined + {} | undefined, ] > | void { request = request || {}; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 51869099b26..1a4e3ef0ed5 100644 --- 
a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -414,7 +414,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest | undefined ), - {} | undefined + {} | undefined, ] >; createReadSession( @@ -463,7 +463,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -519,7 +519,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest | undefined ), - {} | undefined + {} | undefined, ] >; batchCreateReadSessionStreams( @@ -568,7 +568,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -633,7 +633,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; finalizeStream( @@ -682,7 +682,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; @@ -750,7 +750,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest | undefined ), - {} | undefined + {} | undefined, ] >; splitReadStream( @@ -799,7 +799,7 @@ export class BigQueryStorageClient { | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest | undefined ), - {} | undefined + {} | undefined, ] > | void { request = request || {}; From 21510ac06622b91e87a516c1f55595aaaaa5b134 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:35:48 -0400 Subject: [PATCH 226/333] chore(main): release 4.0.0 (#356) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/CHANGELOG.md | 11 +++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...etadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 59790bd297c..ddf45e00427 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [4.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.4.0...v4.0.0) (2023-08-01) + + +### ⚠ BREAKING CHANGES + +* migrate to node 14 ([#354](https://github.com/googleapis/nodejs-bigquery-storage/issues/354)) + +### Miscellaneous Chores + +* Migrate to node 14 ([#354](https://github.com/googleapis/nodejs-bigquery-storage/issues/354)) ([4423635](https://github.com/googleapis/nodejs-bigquery-storage/commit/4423635d2d815193be63e827c6360e9b9208dec1)) + ## [3.4.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.3.0...v3.4.0) (2023-06-05) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 63e3ff3f242..7edc6226051 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "3.4.0", + "version": "4.0.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git 
a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 9b845d8bb33..26de33d3b16 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.4.0", + "version": "4.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 58a87a47a3b..9b3533d6c6a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "3.4.0", + "version": "4.0.0", "language": "TYPESCRIPT", "apis": [ { From 6e258f70681e7e9ff6e54a237047b5683cc0cff0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:04:07 -0400 Subject: [PATCH 227/333] chore(deps): upgrade dependencies for Node.js images (#357) Source-Link: https://github.com/googleapis/synthtool/commit/1dd93a39c888d50dfaf270d7fcd5b5feb66a4f7f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:3ad01f4c6671efb094b43f7d3a3e0b9510bd6501f2e65e874dd525373e29de75 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../bigquery-storage/protos/protos.d.ts | 1596 ++++++ handwritten/bigquery-storage/protos/protos.js | 4288 ++++++++++++++++- 
.../bigquery-storage/protos/protos.json | 304 ++ 4 files changed, 6093 insertions(+), 99 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index c7d7a2025af..0c47c8b71d4 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:ef104a520c849ffde60495342ecf099dfb6256eab0fbd173228f447bc73d1aa9 -# created: 2023-07-10T21:36:52.433664553Z \ No newline at end of file + digest: sha256:3ad01f4c6671efb094b43f7d3a3e0b9510bd6501f2e65e874dd525373e29de75 +# created: 2023-08-01T22:29:52.50398591Z diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 2e0c02d9802..085f8586882 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -8600,6 +8600,9 @@ export namespace google { /** MessageOptions mapEntry */ mapEntry?: (boolean|null); + /** MessageOptions deprecatedLegacyJsonFieldConflicts */ + deprecatedLegacyJsonFieldConflicts?: (boolean|null); + /** MessageOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -8628,6 +8631,9 @@ export namespace google { /** MessageOptions mapEntry. */ public mapEntry: boolean; + /** MessageOptions deprecatedLegacyJsonFieldConflicts. */ + public deprecatedLegacyJsonFieldConflicts: boolean; + /** MessageOptions uninterpretedOption. 
*/ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -8733,6 +8739,15 @@ export namespace google { /** FieldOptions weak */ weak?: (boolean|null); + /** FieldOptions debugRedact */ + debugRedact?: (boolean|null); + + /** FieldOptions retention */ + retention?: (google.protobuf.FieldOptions.OptionRetention|keyof typeof google.protobuf.FieldOptions.OptionRetention|null); + + /** FieldOptions target */ + target?: (google.protobuf.FieldOptions.OptionTargetType|keyof typeof google.protobuf.FieldOptions.OptionTargetType|null); + /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -8776,6 +8791,15 @@ export namespace google { /** FieldOptions weak. */ public weak: boolean; + /** FieldOptions debugRedact. */ + public debugRedact: boolean; + + /** FieldOptions retention. */ + public retention: (google.protobuf.FieldOptions.OptionRetention|keyof typeof google.protobuf.FieldOptions.OptionRetention); + + /** FieldOptions target. */ + public target: (google.protobuf.FieldOptions.OptionTargetType|keyof typeof google.protobuf.FieldOptions.OptionTargetType); + /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -8872,6 +8896,27 @@ export namespace google { JS_STRING = 1, JS_NUMBER = 2 } + + /** OptionRetention enum. */ + enum OptionRetention { + RETENTION_UNKNOWN = 0, + RETENTION_RUNTIME = 1, + RETENTION_SOURCE = 2 + } + + /** OptionTargetType enum. */ + enum OptionTargetType { + TARGET_TYPE_UNKNOWN = 0, + TARGET_TYPE_FILE = 1, + TARGET_TYPE_EXTENSION_RANGE = 2, + TARGET_TYPE_MESSAGE = 3, + TARGET_TYPE_FIELD = 4, + TARGET_TYPE_ONEOF = 5, + TARGET_TYPE_ENUM = 6, + TARGET_TYPE_ENUM_ENTRY = 7, + TARGET_TYPE_SERVICE = 8, + TARGET_TYPE_METHOD = 9 + } } /** Properties of an OneofOptions. 
*/ @@ -8980,6 +9025,9 @@ export namespace google { /** EnumOptions deprecated */ deprecated?: (boolean|null); + /** EnumOptions deprecatedLegacyJsonFieldConflicts */ + deprecatedLegacyJsonFieldConflicts?: (boolean|null); + /** EnumOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -8999,6 +9047,9 @@ export namespace google { /** EnumOptions deprecated. */ public deprecated: boolean; + /** EnumOptions deprecatedLegacyJsonFieldConflicts. */ + public deprecatedLegacyJsonFieldConflicts: boolean; + /** EnumOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -10108,6 +10159,109 @@ export namespace google { } } + /** Properties of a Duration. */ + interface IDuration { + + /** Duration seconds */ + seconds?: (number|Long|string|null); + + /** Duration nanos */ + nanos?: (number|null); + } + + /** Represents a Duration. */ + class Duration implements IDuration { + + /** + * Constructs a new Duration. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IDuration); + + /** Duration seconds. */ + public seconds: (number|Long|string); + + /** Duration nanos. */ + public nanos: number; + + /** + * Creates a new Duration instance using the specified properties. + * @param [properties] Properties to set + * @returns Duration instance + */ + public static create(properties?: google.protobuf.IDuration): google.protobuf.Duration; + + /** + * Encodes the specified Duration message. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. + * @param message Duration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IDuration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Duration message, length delimited. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. 
+ * @param message Duration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IDuration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Duration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Duration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Duration; + + /** + * Decodes a Duration message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Duration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Duration; + + /** + * Verifies a Duration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Duration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Duration + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Duration; + + /** + * Creates a plain object from a Duration message. Also converts values to other types if specified. + * @param message Duration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Duration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Duration to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Duration + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** Properties of a Timestamp. */ interface ITimestamp { @@ -11642,6 +11796,1448 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** Properties of a CommonLanguageSettings. */ + interface ICommonLanguageSettings { + + /** CommonLanguageSettings referenceDocsUri */ + referenceDocsUri?: (string|null); + + /** CommonLanguageSettings destinations */ + destinations?: (google.api.ClientLibraryDestination[]|null); + } + + /** Represents a CommonLanguageSettings. */ + class CommonLanguageSettings implements ICommonLanguageSettings { + + /** + * Constructs a new CommonLanguageSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICommonLanguageSettings); + + /** CommonLanguageSettings referenceDocsUri. */ + public referenceDocsUri: string; + + /** CommonLanguageSettings destinations. */ + public destinations: google.api.ClientLibraryDestination[]; + + /** + * Creates a new CommonLanguageSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns CommonLanguageSettings instance + */ + public static create(properties?: google.api.ICommonLanguageSettings): google.api.CommonLanguageSettings; + + /** + * Encodes the specified CommonLanguageSettings message. Does not implicitly {@link google.api.CommonLanguageSettings.verify|verify} messages. 
+ * @param message CommonLanguageSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICommonLanguageSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CommonLanguageSettings message, length delimited. Does not implicitly {@link google.api.CommonLanguageSettings.verify|verify} messages. + * @param message CommonLanguageSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICommonLanguageSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CommonLanguageSettings message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CommonLanguageSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CommonLanguageSettings; + + /** + * Decodes a CommonLanguageSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CommonLanguageSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CommonLanguageSettings; + + /** + * Verifies a CommonLanguageSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CommonLanguageSettings message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns CommonLanguageSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.CommonLanguageSettings; + + /** + * Creates a plain object from a CommonLanguageSettings message. Also converts values to other types if specified. + * @param message CommonLanguageSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CommonLanguageSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CommonLanguageSettings to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CommonLanguageSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ClientLibrarySettings. */ + interface IClientLibrarySettings { + + /** ClientLibrarySettings version */ + version?: (string|null); + + /** ClientLibrarySettings launchStage */ + launchStage?: (google.api.LaunchStage|keyof typeof google.api.LaunchStage|null); + + /** ClientLibrarySettings restNumericEnums */ + restNumericEnums?: (boolean|null); + + /** ClientLibrarySettings javaSettings */ + javaSettings?: (google.api.IJavaSettings|null); + + /** ClientLibrarySettings cppSettings */ + cppSettings?: (google.api.ICppSettings|null); + + /** ClientLibrarySettings phpSettings */ + phpSettings?: (google.api.IPhpSettings|null); + + /** ClientLibrarySettings pythonSettings */ + pythonSettings?: (google.api.IPythonSettings|null); + + /** ClientLibrarySettings nodeSettings */ + nodeSettings?: (google.api.INodeSettings|null); + + /** ClientLibrarySettings dotnetSettings */ + dotnetSettings?: (google.api.IDotnetSettings|null); + + /** ClientLibrarySettings rubySettings */ + rubySettings?: (google.api.IRubySettings|null); + + /** ClientLibrarySettings 
goSettings */ + goSettings?: (google.api.IGoSettings|null); + } + + /** Represents a ClientLibrarySettings. */ + class ClientLibrarySettings implements IClientLibrarySettings { + + /** + * Constructs a new ClientLibrarySettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IClientLibrarySettings); + + /** ClientLibrarySettings version. */ + public version: string; + + /** ClientLibrarySettings launchStage. */ + public launchStage: (google.api.LaunchStage|keyof typeof google.api.LaunchStage); + + /** ClientLibrarySettings restNumericEnums. */ + public restNumericEnums: boolean; + + /** ClientLibrarySettings javaSettings. */ + public javaSettings?: (google.api.IJavaSettings|null); + + /** ClientLibrarySettings cppSettings. */ + public cppSettings?: (google.api.ICppSettings|null); + + /** ClientLibrarySettings phpSettings. */ + public phpSettings?: (google.api.IPhpSettings|null); + + /** ClientLibrarySettings pythonSettings. */ + public pythonSettings?: (google.api.IPythonSettings|null); + + /** ClientLibrarySettings nodeSettings. */ + public nodeSettings?: (google.api.INodeSettings|null); + + /** ClientLibrarySettings dotnetSettings. */ + public dotnetSettings?: (google.api.IDotnetSettings|null); + + /** ClientLibrarySettings rubySettings. */ + public rubySettings?: (google.api.IRubySettings|null); + + /** ClientLibrarySettings goSettings. */ + public goSettings?: (google.api.IGoSettings|null); + + /** + * Creates a new ClientLibrarySettings instance using the specified properties. + * @param [properties] Properties to set + * @returns ClientLibrarySettings instance + */ + public static create(properties?: google.api.IClientLibrarySettings): google.api.ClientLibrarySettings; + + /** + * Encodes the specified ClientLibrarySettings message. Does not implicitly {@link google.api.ClientLibrarySettings.verify|verify} messages. 
+ * @param message ClientLibrarySettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IClientLibrarySettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ClientLibrarySettings message, length delimited. Does not implicitly {@link google.api.ClientLibrarySettings.verify|verify} messages. + * @param message ClientLibrarySettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IClientLibrarySettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ClientLibrarySettings message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ClientLibrarySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.ClientLibrarySettings; + + /** + * Decodes a ClientLibrarySettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ClientLibrarySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.ClientLibrarySettings; + + /** + * Verifies a ClientLibrarySettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ClientLibrarySettings message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ClientLibrarySettings + */ + public static fromObject(object: { [k: string]: any }): google.api.ClientLibrarySettings; + + /** + * Creates a plain object from a ClientLibrarySettings message. Also converts values to other types if specified. + * @param message ClientLibrarySettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.ClientLibrarySettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ClientLibrarySettings to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ClientLibrarySettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Publishing. */ + interface IPublishing { + + /** Publishing methodSettings */ + methodSettings?: (google.api.IMethodSettings[]|null); + + /** Publishing newIssueUri */ + newIssueUri?: (string|null); + + /** Publishing documentationUri */ + documentationUri?: (string|null); + + /** Publishing apiShortName */ + apiShortName?: (string|null); + + /** Publishing githubLabel */ + githubLabel?: (string|null); + + /** Publishing codeownerGithubTeams */ + codeownerGithubTeams?: (string[]|null); + + /** Publishing docTagPrefix */ + docTagPrefix?: (string|null); + + /** Publishing organization */ + organization?: (google.api.ClientLibraryOrganization|keyof typeof google.api.ClientLibraryOrganization|null); + + /** Publishing librarySettings */ + librarySettings?: (google.api.IClientLibrarySettings[]|null); + } + + /** Represents a Publishing. */ + class Publishing implements IPublishing { + + /** + * Constructs a new Publishing. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IPublishing); + + /** Publishing methodSettings. 
*/ + public methodSettings: google.api.IMethodSettings[]; + + /** Publishing newIssueUri. */ + public newIssueUri: string; + + /** Publishing documentationUri. */ + public documentationUri: string; + + /** Publishing apiShortName. */ + public apiShortName: string; + + /** Publishing githubLabel. */ + public githubLabel: string; + + /** Publishing codeownerGithubTeams. */ + public codeownerGithubTeams: string[]; + + /** Publishing docTagPrefix. */ + public docTagPrefix: string; + + /** Publishing organization. */ + public organization: (google.api.ClientLibraryOrganization|keyof typeof google.api.ClientLibraryOrganization); + + /** Publishing librarySettings. */ + public librarySettings: google.api.IClientLibrarySettings[]; + + /** + * Creates a new Publishing instance using the specified properties. + * @param [properties] Properties to set + * @returns Publishing instance + */ + public static create(properties?: google.api.IPublishing): google.api.Publishing; + + /** + * Encodes the specified Publishing message. Does not implicitly {@link google.api.Publishing.verify|verify} messages. + * @param message Publishing message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IPublishing, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Publishing message, length delimited. Does not implicitly {@link google.api.Publishing.verify|verify} messages. + * @param message Publishing message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IPublishing, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Publishing message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Publishing + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.Publishing; + + /** + * Decodes a Publishing message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Publishing + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.Publishing; + + /** + * Verifies a Publishing message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Publishing message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Publishing + */ + public static fromObject(object: { [k: string]: any }): google.api.Publishing; + + /** + * Creates a plain object from a Publishing message. Also converts values to other types if specified. + * @param message Publishing + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.Publishing, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Publishing to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Publishing + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a JavaSettings. */ + interface IJavaSettings { + + /** JavaSettings libraryPackage */ + libraryPackage?: (string|null); + + /** JavaSettings serviceClassNames */ + serviceClassNames?: ({ [k: string]: string }|null); + + /** JavaSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a JavaSettings. */ + class JavaSettings implements IJavaSettings { + + /** + * Constructs a new JavaSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IJavaSettings); + + /** JavaSettings libraryPackage. */ + public libraryPackage: string; + + /** JavaSettings serviceClassNames. */ + public serviceClassNames: { [k: string]: string }; + + /** JavaSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new JavaSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns JavaSettings instance + */ + public static create(properties?: google.api.IJavaSettings): google.api.JavaSettings; + + /** + * Encodes the specified JavaSettings message. Does not implicitly {@link google.api.JavaSettings.verify|verify} messages. + * @param message JavaSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IJavaSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified JavaSettings message, length delimited. Does not implicitly {@link google.api.JavaSettings.verify|verify} messages. 
+ * @param message JavaSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IJavaSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a JavaSettings message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns JavaSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.JavaSettings; + + /** + * Decodes a JavaSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns JavaSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.JavaSettings; + + /** + * Verifies a JavaSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a JavaSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns JavaSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.JavaSettings; + + /** + * Creates a plain object from a JavaSettings message. Also converts values to other types if specified. 
+ * @param message JavaSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.JavaSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this JavaSettings to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for JavaSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a CppSettings. */ + interface ICppSettings { + + /** CppSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a CppSettings. */ + class CppSettings implements ICppSettings { + + /** + * Constructs a new CppSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ICppSettings); + + /** CppSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new CppSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns CppSettings instance + */ + public static create(properties?: google.api.ICppSettings): google.api.CppSettings; + + /** + * Encodes the specified CppSettings message. Does not implicitly {@link google.api.CppSettings.verify|verify} messages. + * @param message CppSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ICppSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CppSettings message, length delimited. Does not implicitly {@link google.api.CppSettings.verify|verify} messages. 
+ * @param message CppSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ICppSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CppSettings message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CppSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.CppSettings; + + /** + * Decodes a CppSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CppSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.CppSettings; + + /** + * Verifies a CppSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CppSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CppSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.CppSettings; + + /** + * Creates a plain object from a CppSettings message. Also converts values to other types if specified. + * @param message CppSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.CppSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CppSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CppSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PhpSettings. */ + interface IPhpSettings { + + /** PhpSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a PhpSettings. */ + class PhpSettings implements IPhpSettings { + + /** + * Constructs a new PhpSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IPhpSettings); + + /** PhpSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new PhpSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns PhpSettings instance + */ + public static create(properties?: google.api.IPhpSettings): google.api.PhpSettings; + + /** + * Encodes the specified PhpSettings message. Does not implicitly {@link google.api.PhpSettings.verify|verify} messages. + * @param message PhpSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IPhpSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PhpSettings message, length delimited. Does not implicitly {@link google.api.PhpSettings.verify|verify} messages. + * @param message PhpSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IPhpSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PhpSettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PhpSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PhpSettings; + + /** + * Decodes a PhpSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PhpSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PhpSettings; + + /** + * Verifies a PhpSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PhpSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PhpSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.PhpSettings; + + /** + * Creates a plain object from a PhpSettings message. Also converts values to other types if specified. + * @param message PhpSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.PhpSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PhpSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PhpSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PythonSettings. */ + interface IPythonSettings { + + /** PythonSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a PythonSettings. */ + class PythonSettings implements IPythonSettings { + + /** + * Constructs a new PythonSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IPythonSettings); + + /** PythonSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new PythonSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns PythonSettings instance + */ + public static create(properties?: google.api.IPythonSettings): google.api.PythonSettings; + + /** + * Encodes the specified PythonSettings message. Does not implicitly {@link google.api.PythonSettings.verify|verify} messages. + * @param message PythonSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IPythonSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PythonSettings message, length delimited. Does not implicitly {@link google.api.PythonSettings.verify|verify} messages. + * @param message PythonSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IPythonSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PythonSettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PythonSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings; + + /** + * Decodes a PythonSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PythonSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings; + + /** + * Verifies a PythonSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PythonSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PythonSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.PythonSettings; + + /** + * Creates a plain object from a PythonSettings message. Also converts values to other types if specified. + * @param message PythonSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.PythonSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PythonSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PythonSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a NodeSettings. */ + interface INodeSettings { + + /** NodeSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a NodeSettings. */ + class NodeSettings implements INodeSettings { + + /** + * Constructs a new NodeSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.INodeSettings); + + /** NodeSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new NodeSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns NodeSettings instance + */ + public static create(properties?: google.api.INodeSettings): google.api.NodeSettings; + + /** + * Encodes the specified NodeSettings message. Does not implicitly {@link google.api.NodeSettings.verify|verify} messages. + * @param message NodeSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.INodeSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified NodeSettings message, length delimited. Does not implicitly {@link google.api.NodeSettings.verify|verify} messages. + * @param message NodeSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.INodeSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a NodeSettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns NodeSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.NodeSettings; + + /** + * Decodes a NodeSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns NodeSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.NodeSettings; + + /** + * Verifies a NodeSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a NodeSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns NodeSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.NodeSettings; + + /** + * Creates a plain object from a NodeSettings message. Also converts values to other types if specified. + * @param message NodeSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.NodeSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this NodeSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for NodeSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a DotnetSettings. */ + interface IDotnetSettings { + + /** DotnetSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a DotnetSettings. */ + class DotnetSettings implements IDotnetSettings { + + /** + * Constructs a new DotnetSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IDotnetSettings); + + /** DotnetSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new DotnetSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns DotnetSettings instance + */ + public static create(properties?: google.api.IDotnetSettings): google.api.DotnetSettings; + + /** + * Encodes the specified DotnetSettings message. Does not implicitly {@link google.api.DotnetSettings.verify|verify} messages. + * @param message DotnetSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IDotnetSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DotnetSettings message, length delimited. Does not implicitly {@link google.api.DotnetSettings.verify|verify} messages. + * @param message DotnetSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IDotnetSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DotnetSettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DotnetSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.DotnetSettings; + + /** + * Decodes a DotnetSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DotnetSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.DotnetSettings; + + /** + * Verifies a DotnetSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DotnetSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DotnetSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.DotnetSettings; + + /** + * Creates a plain object from a DotnetSettings message. Also converts values to other types if specified. + * @param message DotnetSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.DotnetSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DotnetSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DotnetSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a RubySettings. */ + interface IRubySettings { + + /** RubySettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a RubySettings. */ + class RubySettings implements IRubySettings { + + /** + * Constructs a new RubySettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IRubySettings); + + /** RubySettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new RubySettings instance using the specified properties. + * @param [properties] Properties to set + * @returns RubySettings instance + */ + public static create(properties?: google.api.IRubySettings): google.api.RubySettings; + + /** + * Encodes the specified RubySettings message. Does not implicitly {@link google.api.RubySettings.verify|verify} messages. + * @param message RubySettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IRubySettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified RubySettings message, length delimited. Does not implicitly {@link google.api.RubySettings.verify|verify} messages. + * @param message RubySettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IRubySettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a RubySettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns RubySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.RubySettings; + + /** + * Decodes a RubySettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns RubySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.RubySettings; + + /** + * Verifies a RubySettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a RubySettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns RubySettings + */ + public static fromObject(object: { [k: string]: any }): google.api.RubySettings; + + /** + * Creates a plain object from a RubySettings message. Also converts values to other types if specified. + * @param message RubySettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.RubySettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this RubySettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for RubySettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a GoSettings. */ + interface IGoSettings { + + /** GoSettings common */ + common?: (google.api.ICommonLanguageSettings|null); + } + + /** Represents a GoSettings. */ + class GoSettings implements IGoSettings { + + /** + * Constructs a new GoSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IGoSettings); + + /** GoSettings common. */ + public common?: (google.api.ICommonLanguageSettings|null); + + /** + * Creates a new GoSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns GoSettings instance + */ + public static create(properties?: google.api.IGoSettings): google.api.GoSettings; + + /** + * Encodes the specified GoSettings message. Does not implicitly {@link google.api.GoSettings.verify|verify} messages. + * @param message GoSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IGoSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified GoSettings message, length delimited. Does not implicitly {@link google.api.GoSettings.verify|verify} messages. + * @param message GoSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IGoSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GoSettings message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GoSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.GoSettings; + + /** + * Decodes a GoSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns GoSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.GoSettings; + + /** + * Verifies a GoSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a GoSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns GoSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.GoSettings; + + /** + * Creates a plain object from a GoSettings message. Also converts values to other types if specified. + * @param message GoSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.GoSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this GoSettings to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for GoSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MethodSettings. */ + interface IMethodSettings { + + /** MethodSettings selector */ + selector?: (string|null); + + /** MethodSettings longRunning */ + longRunning?: (google.api.MethodSettings.ILongRunning|null); + } + + /** Represents a MethodSettings. */ + class MethodSettings implements IMethodSettings { + + /** + * Constructs a new MethodSettings. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.IMethodSettings); + + /** MethodSettings selector. */ + public selector: string; + + /** MethodSettings longRunning. */ + public longRunning?: (google.api.MethodSettings.ILongRunning|null); + + /** + * Creates a new MethodSettings instance using the specified properties. + * @param [properties] Properties to set + * @returns MethodSettings instance + */ + public static create(properties?: google.api.IMethodSettings): google.api.MethodSettings; + + /** + * Encodes the specified MethodSettings message. Does not implicitly {@link google.api.MethodSettings.verify|verify} messages. + * @param message MethodSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.IMethodSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MethodSettings message, length delimited. Does not implicitly {@link google.api.MethodSettings.verify|verify} messages. 
+ * @param message MethodSettings message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.IMethodSettings, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MethodSettings message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MethodSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.MethodSettings; + + /** + * Decodes a MethodSettings message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MethodSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.MethodSettings; + + /** + * Verifies a MethodSettings message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MethodSettings message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MethodSettings + */ + public static fromObject(object: { [k: string]: any }): google.api.MethodSettings; + + /** + * Creates a plain object from a MethodSettings message. Also converts values to other types if specified. 
+ * @param message MethodSettings + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.MethodSettings, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MethodSettings to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MethodSettings + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace MethodSettings { + + /** Properties of a LongRunning. */ + interface ILongRunning { + + /** LongRunning initialPollDelay */ + initialPollDelay?: (google.protobuf.IDuration|null); + + /** LongRunning pollDelayMultiplier */ + pollDelayMultiplier?: (number|null); + + /** LongRunning maxPollDelay */ + maxPollDelay?: (google.protobuf.IDuration|null); + + /** LongRunning totalPollTimeout */ + totalPollTimeout?: (google.protobuf.IDuration|null); + } + + /** Represents a LongRunning. */ + class LongRunning implements ILongRunning { + + /** + * Constructs a new LongRunning. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.MethodSettings.ILongRunning); + + /** LongRunning initialPollDelay. */ + public initialPollDelay?: (google.protobuf.IDuration|null); + + /** LongRunning pollDelayMultiplier. */ + public pollDelayMultiplier: number; + + /** LongRunning maxPollDelay. */ + public maxPollDelay?: (google.protobuf.IDuration|null); + + /** LongRunning totalPollTimeout. */ + public totalPollTimeout?: (google.protobuf.IDuration|null); + + /** + * Creates a new LongRunning instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns LongRunning instance + */ + public static create(properties?: google.api.MethodSettings.ILongRunning): google.api.MethodSettings.LongRunning; + + /** + * Encodes the specified LongRunning message. Does not implicitly {@link google.api.MethodSettings.LongRunning.verify|verify} messages. + * @param message LongRunning message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.MethodSettings.ILongRunning, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified LongRunning message, length delimited. Does not implicitly {@link google.api.MethodSettings.LongRunning.verify|verify} messages. + * @param message LongRunning message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.MethodSettings.ILongRunning, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a LongRunning message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns LongRunning + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.MethodSettings.LongRunning; + + /** + * Decodes a LongRunning message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns LongRunning + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.MethodSettings.LongRunning; + + /** + * Verifies a LongRunning message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a LongRunning message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns LongRunning + */ + public static fromObject(object: { [k: string]: any }): google.api.MethodSettings.LongRunning; + + /** + * Creates a plain object from a LongRunning message. Also converts values to other types if specified. + * @param message LongRunning + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.MethodSettings.LongRunning, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this LongRunning to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for LongRunning + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + + /** ClientLibraryOrganization enum. */ + enum ClientLibraryOrganization { + CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0, + CLOUD = 1, + ADS = 2, + PHOTOS = 3, + STREET_VIEW = 4 + } + + /** ClientLibraryDestination enum. */ + enum ClientLibraryDestination { + CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0, + GITHUB = 10, + PACKAGE_MANAGER = 20 + } + + /** LaunchStage enum. */ + enum LaunchStage { + LAUNCH_STAGE_UNSPECIFIED = 0, + UNIMPLEMENTED = 6, + PRELAUNCH = 7, + EARLY_ACCESS = 1, + ALPHA = 2, + BETA = 3, + GA = 4, + DEPRECATED = 5 + } + /** FieldBehavior enum. 
*/ enum FieldBehavior { FIELD_BEHAVIOR_UNSPECIFIED = 0, diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index c2ce9c76aa5..903e738c0a8 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -21621,6 +21621,7 @@ * @property {boolean|null} [noStandardDescriptorAccessor] MessageOptions noStandardDescriptorAccessor * @property {boolean|null} [deprecated] MessageOptions deprecated * @property {boolean|null} [mapEntry] MessageOptions mapEntry + * @property {boolean|null} [deprecatedLegacyJsonFieldConflicts] MessageOptions deprecatedLegacyJsonFieldConflicts * @property {Array.|null} [uninterpretedOption] MessageOptions uninterpretedOption * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource */ @@ -21673,6 +21674,14 @@ */ MessageOptions.prototype.mapEntry = false; + /** + * MessageOptions deprecatedLegacyJsonFieldConflicts. + * @member {boolean} deprecatedLegacyJsonFieldConflicts + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.deprecatedLegacyJsonFieldConflicts = false; + /** * MessageOptions uninterpretedOption. 
* @member {Array.} uninterpretedOption @@ -21721,6 +21730,8 @@ writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); + if (message.deprecatedLegacyJsonFieldConflicts != null && Object.hasOwnProperty.call(message, "deprecatedLegacyJsonFieldConflicts")) + writer.uint32(/* id 11, wireType 0 =*/88).bool(message.deprecatedLegacyJsonFieldConflicts); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -21776,6 +21787,10 @@ message.mapEntry = reader.bool(); break; } + case 11: { + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -21833,6 +21848,9 @@ if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) if (typeof message.mapEntry !== "boolean") return "mapEntry: boolean expected"; + if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) + if (typeof message.deprecatedLegacyJsonFieldConflicts !== "boolean") + return "deprecatedLegacyJsonFieldConflicts: boolean expected"; if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -21870,6 +21888,8 @@ message.deprecated = Boolean(object.deprecated); if (object.mapEntry != null) message.mapEntry = Boolean(object.mapEntry); + if (object.deprecatedLegacyJsonFieldConflicts != null) + message.deprecatedLegacyJsonFieldConflicts = Boolean(object.deprecatedLegacyJsonFieldConflicts); if 
(object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); @@ -21908,6 +21928,7 @@ object.noStandardDescriptorAccessor = false; object.deprecated = false; object.mapEntry = false; + object.deprecatedLegacyJsonFieldConflicts = false; object[".google.api.resource"] = null; } if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) @@ -21918,6 +21939,8 @@ object.deprecated = message.deprecated; if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) object.mapEntry = message.mapEntry; + if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) + object.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -21970,6 +21993,9 @@ * @property {boolean|null} [unverifiedLazy] FieldOptions unverifiedLazy * @property {boolean|null} [deprecated] FieldOptions deprecated * @property {boolean|null} [weak] FieldOptions weak + * @property {boolean|null} [debugRedact] FieldOptions debugRedact + * @property {google.protobuf.FieldOptions.OptionRetention|null} [retention] FieldOptions retention + * @property {google.protobuf.FieldOptions.OptionTargetType|null} [target] FieldOptions target * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior @@ -22049,6 +22075,30 @@ */ FieldOptions.prototype.weak = false; + /** + * FieldOptions debugRedact. 
+ * @member {boolean} debugRedact + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.debugRedact = false; + + /** + * FieldOptions retention. + * @member {google.protobuf.FieldOptions.OptionRetention} retention + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.retention = 0; + + /** + * FieldOptions target. + * @member {google.protobuf.FieldOptions.OptionTargetType} target + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.target = 0; + /** * FieldOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -22119,6 +22169,12 @@ writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); if (message.unverifiedLazy != null && Object.hasOwnProperty.call(message, "unverifiedLazy")) writer.uint32(/* id 15, wireType 0 =*/120).bool(message.unverifiedLazy); + if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) + writer.uint32(/* id 16, wireType 0 =*/128).bool(message.debugRedact); + if (message.retention != null && Object.hasOwnProperty.call(message, "retention")) + writer.uint32(/* id 17, wireType 0 =*/136).int32(message.retention); + if (message.target != null && Object.hasOwnProperty.call(message, "target")) + writer.uint32(/* id 18, wireType 0 =*/144).int32(message.target); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -22194,6 +22250,18 @@ message.weak = reader.bool(); break; } + case 16: { + message.debugRedact = reader.bool(); + break; + } + case 17: { + message.retention = reader.int32(); + break; + } + case 18: { + message.target = reader.int32(); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = 
[]; @@ -22287,6 +22355,34 @@ if (message.weak != null && message.hasOwnProperty("weak")) if (typeof message.weak !== "boolean") return "weak: boolean expected"; + if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) + if (typeof message.debugRedact !== "boolean") + return "debugRedact: boolean expected"; + if (message.retention != null && message.hasOwnProperty("retention")) + switch (message.retention) { + default: + return "retention: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.target != null && message.hasOwnProperty("target")) + switch (message.target) { + default: + return "target: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + break; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -22387,6 +22483,76 @@ message.deprecated = Boolean(object.deprecated); if (object.weak != null) message.weak = Boolean(object.weak); + if (object.debugRedact != null) + message.debugRedact = Boolean(object.debugRedact); + switch (object.retention) { + default: + if (typeof object.retention === "number") { + message.retention = object.retention; + break; + } + break; + case "RETENTION_UNKNOWN": + case 0: + message.retention = 0; + break; + case "RETENTION_RUNTIME": + case 1: + message.retention = 1; + break; + case "RETENTION_SOURCE": + case 2: + message.retention = 2; + break; + } + switch (object.target) { + default: + if (typeof object.target === "number") { + message.target = object.target; + break; + } + break; + case "TARGET_TYPE_UNKNOWN": + case 0: + message.target = 0; + break; + case "TARGET_TYPE_FILE": + case 1: + message.target = 1; + break; + case "TARGET_TYPE_EXTENSION_RANGE": + case 2: + message.target = 2; + break; + case "TARGET_TYPE_MESSAGE": + case 3: + message.target = 3; + break; + case 
"TARGET_TYPE_FIELD": + case 4: + message.target = 4; + break; + case "TARGET_TYPE_ONEOF": + case 5: + message.target = 5; + break; + case "TARGET_TYPE_ENUM": + case 6: + message.target = 6; + break; + case "TARGET_TYPE_ENUM_ENTRY": + case 7: + message.target = 7; + break; + case "TARGET_TYPE_SERVICE": + case 8: + message.target = 8; + break; + case "TARGET_TYPE_METHOD": + case 9: + message.target = 9; + break; + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); @@ -22477,6 +22643,9 @@ object.jstype = options.enums === String ? "JS_NORMAL" : 0; object.weak = false; object.unverifiedLazy = false; + object.debugRedact = false; + object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; + object.target = options.enums === String ? "TARGET_TYPE_UNKNOWN" : 0; object[".google.api.resourceReference"] = null; object[".google.cloud.bigquery.storage.v1.columnName"] = null; } @@ -22494,6 +22663,12 @@ object.weak = message.weak; if (message.unverifiedLazy != null && message.hasOwnProperty("unverifiedLazy")) object.unverifiedLazy = message.unverifiedLazy; + if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) + object.debugRedact = message.debugRedact; + if (message.retention != null && message.hasOwnProperty("retention")) + object.retention = options.enums === String ? $root.google.protobuf.FieldOptions.OptionRetention[message.retention] === undefined ? message.retention : $root.google.protobuf.FieldOptions.OptionRetention[message.retention] : message.retention; + if (message.target != null && message.hasOwnProperty("target")) + object.target = options.enums === String ? $root.google.protobuf.FieldOptions.OptionTargetType[message.target] === undefined ? 
message.target : $root.google.protobuf.FieldOptions.OptionTargetType[message.target] : message.target; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -22569,6 +22744,52 @@ return values; })(); + /** + * OptionRetention enum. + * @name google.protobuf.FieldOptions.OptionRetention + * @enum {number} + * @property {number} RETENTION_UNKNOWN=0 RETENTION_UNKNOWN value + * @property {number} RETENTION_RUNTIME=1 RETENTION_RUNTIME value + * @property {number} RETENTION_SOURCE=2 RETENTION_SOURCE value + */ + FieldOptions.OptionRetention = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "RETENTION_UNKNOWN"] = 0; + values[valuesById[1] = "RETENTION_RUNTIME"] = 1; + values[valuesById[2] = "RETENTION_SOURCE"] = 2; + return values; + })(); + + /** + * OptionTargetType enum. + * @name google.protobuf.FieldOptions.OptionTargetType + * @enum {number} + * @property {number} TARGET_TYPE_UNKNOWN=0 TARGET_TYPE_UNKNOWN value + * @property {number} TARGET_TYPE_FILE=1 TARGET_TYPE_FILE value + * @property {number} TARGET_TYPE_EXTENSION_RANGE=2 TARGET_TYPE_EXTENSION_RANGE value + * @property {number} TARGET_TYPE_MESSAGE=3 TARGET_TYPE_MESSAGE value + * @property {number} TARGET_TYPE_FIELD=4 TARGET_TYPE_FIELD value + * @property {number} TARGET_TYPE_ONEOF=5 TARGET_TYPE_ONEOF value + * @property {number} TARGET_TYPE_ENUM=6 TARGET_TYPE_ENUM value + * @property {number} TARGET_TYPE_ENUM_ENTRY=7 TARGET_TYPE_ENUM_ENTRY value + * @property {number} TARGET_TYPE_SERVICE=8 TARGET_TYPE_SERVICE value + * @property {number} TARGET_TYPE_METHOD=9 TARGET_TYPE_METHOD value + */ + FieldOptions.OptionTargetType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "TARGET_TYPE_UNKNOWN"] = 0; + values[valuesById[1] = "TARGET_TYPE_FILE"] = 1; + values[valuesById[2] = "TARGET_TYPE_EXTENSION_RANGE"] 
= 2; + values[valuesById[3] = "TARGET_TYPE_MESSAGE"] = 3; + values[valuesById[4] = "TARGET_TYPE_FIELD"] = 4; + values[valuesById[5] = "TARGET_TYPE_ONEOF"] = 5; + values[valuesById[6] = "TARGET_TYPE_ENUM"] = 6; + values[valuesById[7] = "TARGET_TYPE_ENUM_ENTRY"] = 7; + values[valuesById[8] = "TARGET_TYPE_SERVICE"] = 8; + values[valuesById[9] = "TARGET_TYPE_METHOD"] = 9; + return values; + })(); + return FieldOptions; })(); @@ -22804,6 +23025,7 @@ * @interface IEnumOptions * @property {boolean|null} [allowAlias] EnumOptions allowAlias * @property {boolean|null} [deprecated] EnumOptions deprecated + * @property {boolean|null} [deprecatedLegacyJsonFieldConflicts] EnumOptions deprecatedLegacyJsonFieldConflicts * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption */ @@ -22839,6 +23061,14 @@ */ EnumOptions.prototype.deprecated = false; + /** + * EnumOptions deprecatedLegacyJsonFieldConflicts. + * @member {boolean} deprecatedLegacyJsonFieldConflicts + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.deprecatedLegacyJsonFieldConflicts = false; + /** * EnumOptions uninterpretedOption. 
* @member {Array.} uninterpretedOption @@ -22875,6 +23105,8 @@ writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); + if (message.deprecatedLegacyJsonFieldConflicts != null && Object.hasOwnProperty.call(message, "deprecatedLegacyJsonFieldConflicts")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.deprecatedLegacyJsonFieldConflicts); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -22920,6 +23152,10 @@ message.deprecated = reader.bool(); break; } + case 6: { + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -22967,6 +23203,9 @@ if (message.deprecated != null && message.hasOwnProperty("deprecated")) if (typeof message.deprecated !== "boolean") return "deprecated: boolean expected"; + if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) + if (typeof message.deprecatedLegacyJsonFieldConflicts !== "boolean") + return "deprecatedLegacyJsonFieldConflicts: boolean expected"; if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -22995,6 +23234,8 @@ message.allowAlias = Boolean(object.allowAlias); if (object.deprecated != null) message.deprecated = Boolean(object.deprecated); + if (object.deprecatedLegacyJsonFieldConflicts != null) + message.deprecatedLegacyJsonFieldConflicts = 
Boolean(object.deprecatedLegacyJsonFieldConflicts); if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); @@ -23026,11 +23267,14 @@ if (options.defaults) { object.allowAlias = false; object.deprecated = false; + object.deprecatedLegacyJsonFieldConflicts = false; } if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) object.allowAlias = message.allowAlias; if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; + if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) + object.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -25790,25 +26034,25 @@ return GeneratedCodeInfo; })(); - protobuf.Timestamp = (function() { + protobuf.Duration = (function() { /** - * Properties of a Timestamp. + * Properties of a Duration. * @memberof google.protobuf - * @interface ITimestamp - * @property {number|Long|null} [seconds] Timestamp seconds - * @property {number|null} [nanos] Timestamp nanos + * @interface IDuration + * @property {number|Long|null} [seconds] Duration seconds + * @property {number|null} [nanos] Duration nanos */ /** - * Constructs a new Timestamp. + * Constructs a new Duration. * @memberof google.protobuf - * @classdesc Represents a Timestamp. - * @implements ITimestamp + * @classdesc Represents a Duration. 
+ * @implements IDuration * @constructor - * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @param {google.protobuf.IDuration=} [properties] Properties to set */ - function Timestamp(properties) { + function Duration(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -25816,43 +26060,43 @@ } /** - * Timestamp seconds. + * Duration seconds. * @member {number|Long} seconds - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @instance */ - Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + Duration.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Timestamp nanos. + * Duration nanos. * @member {number} nanos - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @instance */ - Timestamp.prototype.nanos = 0; + Duration.prototype.nanos = 0; /** - * Creates a new Timestamp instance using the specified properties. + * Creates a new Duration instance using the specified properties. * @function create - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - * @returns {google.protobuf.Timestamp} Timestamp instance + * @param {google.protobuf.IDuration=} [properties] Properties to set + * @returns {google.protobuf.Duration} Duration instance */ - Timestamp.create = function create(properties) { - return new Timestamp(properties); + Duration.create = function create(properties) { + return new Duration(properties); }; /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * Encodes the specified Duration message. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. 
* @function encode - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {google.protobuf.IDuration} message Duration message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Timestamp.encode = function encode(message, writer) { + Duration.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) @@ -25863,33 +26107,33 @@ }; /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * Encodes the specified Duration message, length delimited. Does not implicitly {@link google.protobuf.Duration.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {google.protobuf.IDuration} message Duration message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + Duration.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a Timestamp message from the specified reader or buffer. + * Decodes a Duration message from the specified reader or buffer. 
* @function decode - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.protobuf.Duration} Duration * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Timestamp.decode = function decode(reader, length) { + Duration.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Duration(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -25910,30 +26154,30 @@ }; /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * Decodes a Duration message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.protobuf.Duration} Duration * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Timestamp.decodeDelimited = function decodeDelimited(reader) { + Duration.decodeDelimited = function decodeDelimited(reader) { if (!(reader instanceof $Reader)) reader = new $Reader(reader); return this.decode(reader, reader.uint32()); }; /** - * Verifies a Timestamp message. + * Verifies a Duration message. 
* @function verify - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {Object.} message Plain object to verify * @returns {string|null} `null` if valid, otherwise the reason why it is not */ - Timestamp.verify = function verify(message) { + Duration.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; if (message.seconds != null && message.hasOwnProperty("seconds")) @@ -25946,17 +26190,17 @@ }; /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * Creates a Duration message from a plain object. Also converts values to their respective internal types. * @function fromObject - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {Object.} object Plain object - * @returns {google.protobuf.Timestamp} Timestamp + * @returns {google.protobuf.Duration} Duration */ - Timestamp.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Timestamp) + Duration.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Duration) return object; - var message = new $root.google.protobuf.Timestamp(); + var message = new $root.google.protobuf.Duration(); if (object.seconds != null) if ($util.Long) (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; @@ -25972,15 +26216,15 @@ }; /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * Creates a plain object from a Duration message. Also converts values to other types if specified. 
* @function toObject - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static - * @param {google.protobuf.Timestamp} message Timestamp + * @param {google.protobuf.Duration} message Duration * @param {$protobuf.IConversionOptions} [options] Conversion options * @returns {Object.} Plain object */ - Timestamp.toObject = function toObject(message, options) { + Duration.toObject = function toObject(message, options) { if (!options) options = {}; var object = {}; @@ -26003,52 +26247,53 @@ }; /** - * Converts this Timestamp to JSON. + * Converts this Duration to JSON. * @function toJSON - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @instance * @returns {Object.} JSON object */ - Timestamp.prototype.toJSON = function toJSON() { + Duration.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; /** - * Gets the default type url for Timestamp + * Gets the default type url for Duration * @function getTypeUrl - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") * @returns {string} The default type url */ - Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + Duration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { if (typeUrlPrefix === undefined) { typeUrlPrefix = "type.googleapis.com"; } - return typeUrlPrefix + "/google.protobuf.Timestamp"; + return typeUrlPrefix + "/google.protobuf.Duration"; }; - return Timestamp; + return Duration; })(); - protobuf.DoubleValue = (function() { + protobuf.Timestamp = (function() { /** - * Properties of a DoubleValue. + * Properties of a Timestamp. 
* @memberof google.protobuf - * @interface IDoubleValue - * @property {number|null} [value] DoubleValue value + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos */ /** - * Constructs a new DoubleValue. + * Constructs a new Timestamp. * @memberof google.protobuf - * @classdesc Represents a DoubleValue. - * @implements IDoubleValue + * @classdesc Represents a Timestamp. + * @implements ITimestamp * @constructor - * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + * @param {google.protobuf.ITimestamp=} [properties] Properties to set */ - function DoubleValue(properties) { + function Timestamp(properties) { if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -26056,75 +26301,89 @@ } /** - * DoubleValue value. - * @member {number} value - * @memberof google.protobuf.DoubleValue + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp * @instance */ - DoubleValue.prototype.value = 0; + Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; /** - * Creates a new DoubleValue instance using the specified properties. + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. 
* @function create - * @memberof google.protobuf.DoubleValue + * @memberof google.protobuf.Timestamp * @static - * @param {google.protobuf.IDoubleValue=} [properties] Properties to set - * @returns {google.protobuf.DoubleValue} DoubleValue instance + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance */ - DoubleValue.create = function create(properties) { - return new DoubleValue(properties); + Timestamp.create = function create(properties) { + return new Timestamp(properties); }; /** - * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. * @function encode - * @memberof google.protobuf.DoubleValue + * @memberof google.protobuf.Timestamp * @static - * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - DoubleValue.encode = function encode(message, writer) { + Timestamp.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && Object.hasOwnProperty.call(message, "value")) - writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); return writer; }; /** - * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. 
+ * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. * @function encodeDelimited - * @memberof google.protobuf.DoubleValue + * @memberof google.protobuf.Timestamp * @static - * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode * @param {$protobuf.Writer} [writer] Writer to encode to * @returns {$protobuf.Writer} Writer */ - DoubleValue.encodeDelimited = function encodeDelimited(message, writer) { + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { return this.encode(message, writer).ldelim(); }; /** - * Decodes a DoubleValue message from the specified reader or buffer. + * Decodes a Timestamp message from the specified reader or buffer. * @function decode - * @memberof google.protobuf.DoubleValue + * @memberof google.protobuf.Timestamp * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.DoubleValue} DoubleValue + * @returns {google.protobuf.Timestamp} Timestamp * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DoubleValue.decode = function decode(reader, length) { + Timestamp.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: { - message.value = reader.double(); + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); break; } default: @@ -26136,9 +26395,235 @@ }; /** - * Decodes a DoubleValue message from the specified reader or buffer, length delimited. + * Decodes a Timestamp message from the specified reader or buffer, length delimited. * @function decodeDelimited - * @memberof google.protobuf.DoubleValue + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. + * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? 
$util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + + return Timestamp; + })(); + + protobuf.DoubleValue = (function() { + + /** + * Properties of a DoubleValue. + * @memberof google.protobuf + * @interface IDoubleValue + * @property {number|null} [value] DoubleValue value + */ + + /** + * Constructs a new DoubleValue. + * @memberof google.protobuf + * @classdesc Represents a DoubleValue. + * @implements IDoubleValue + * @constructor + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + */ + function DoubleValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DoubleValue value. + * @member {number} value + * @memberof google.protobuf.DoubleValue + * @instance + */ + DoubleValue.prototype.value = 0; + + /** + * Creates a new DoubleValue instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue=} [properties] Properties to set + * @returns {google.protobuf.DoubleValue} DoubleValue instance + */ + DoubleValue.create = function create(properties) { + return new DoubleValue(properties); + }; + + /** + * Encodes the specified DoubleValue message. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @function encode + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DoubleValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); + return writer; + }; + + /** + * Encodes the specified DoubleValue message, length delimited. Does not implicitly {@link google.protobuf.DoubleValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.DoubleValue + * @static + * @param {google.protobuf.IDoubleValue} message DoubleValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DoubleValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DoubleValue message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.DoubleValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.DoubleValue} DoubleValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DoubleValue.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.value = reader.double(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DoubleValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.DoubleValue * @static * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from * @returns {google.protobuf.DoubleValue} DoubleValue @@ -29281,6 +29766,3615 @@ return CustomHttpPattern; })(); + api.CommonLanguageSettings = (function() { + + /** + * Properties of a CommonLanguageSettings. + * @memberof google.api + * @interface ICommonLanguageSettings + * @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri + * @property {Array.|null} [destinations] CommonLanguageSettings destinations + */ + + /** + * Constructs a new CommonLanguageSettings. + * @memberof google.api + * @classdesc Represents a CommonLanguageSettings. 
+ * @implements ICommonLanguageSettings + * @constructor + * @param {google.api.ICommonLanguageSettings=} [properties] Properties to set + */ + function CommonLanguageSettings(properties) { + this.destinations = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CommonLanguageSettings referenceDocsUri. + * @member {string} referenceDocsUri + * @memberof google.api.CommonLanguageSettings + * @instance + */ + CommonLanguageSettings.prototype.referenceDocsUri = ""; + + /** + * CommonLanguageSettings destinations. + * @member {Array.} destinations + * @memberof google.api.CommonLanguageSettings + * @instance + */ + CommonLanguageSettings.prototype.destinations = $util.emptyArray; + + /** + * Creates a new CommonLanguageSettings instance using the specified properties. + * @function create + * @memberof google.api.CommonLanguageSettings + * @static + * @param {google.api.ICommonLanguageSettings=} [properties] Properties to set + * @returns {google.api.CommonLanguageSettings} CommonLanguageSettings instance + */ + CommonLanguageSettings.create = function create(properties) { + return new CommonLanguageSettings(properties); + }; + + /** + * Encodes the specified CommonLanguageSettings message. Does not implicitly {@link google.api.CommonLanguageSettings.verify|verify} messages. 
+ * @function encode + * @memberof google.api.CommonLanguageSettings + * @static + * @param {google.api.ICommonLanguageSettings} message CommonLanguageSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CommonLanguageSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.referenceDocsUri != null && Object.hasOwnProperty.call(message, "referenceDocsUri")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.referenceDocsUri); + if (message.destinations != null && message.destinations.length) { + writer.uint32(/* id 2, wireType 2 =*/18).fork(); + for (var i = 0; i < message.destinations.length; ++i) + writer.int32(message.destinations[i]); + writer.ldelim(); + } + return writer; + }; + + /** + * Encodes the specified CommonLanguageSettings message, length delimited. Does not implicitly {@link google.api.CommonLanguageSettings.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.CommonLanguageSettings + * @static + * @param {google.api.ICommonLanguageSettings} message CommonLanguageSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CommonLanguageSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CommonLanguageSettings message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.CommonLanguageSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.CommonLanguageSettings} CommonLanguageSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CommonLanguageSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CommonLanguageSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.referenceDocsUri = reader.string(); + break; + } + case 2: { + if (!(message.destinations && message.destinations.length)) + message.destinations = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.destinations.push(reader.int32()); + } else + message.destinations.push(reader.int32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CommonLanguageSettings message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.CommonLanguageSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.CommonLanguageSettings} CommonLanguageSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CommonLanguageSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CommonLanguageSettings message. 
+ * @function verify + * @memberof google.api.CommonLanguageSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CommonLanguageSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) + if (!$util.isString(message.referenceDocsUri)) + return "referenceDocsUri: string expected"; + if (message.destinations != null && message.hasOwnProperty("destinations")) { + if (!Array.isArray(message.destinations)) + return "destinations: array expected"; + for (var i = 0; i < message.destinations.length; ++i) + switch (message.destinations[i]) { + default: + return "destinations: enum value[] expected"; + case 0: + case 10: + case 20: + break; + } + } + return null; + }; + + /** + * Creates a CommonLanguageSettings message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.CommonLanguageSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.CommonLanguageSettings} CommonLanguageSettings + */ + CommonLanguageSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.CommonLanguageSettings) + return object; + var message = new $root.google.api.CommonLanguageSettings(); + if (object.referenceDocsUri != null) + message.referenceDocsUri = String(object.referenceDocsUri); + if (object.destinations) { + if (!Array.isArray(object.destinations)) + throw TypeError(".google.api.CommonLanguageSettings.destinations: array expected"); + message.destinations = []; + for (var i = 0; i < object.destinations.length; ++i) + switch (object.destinations[i]) { + default: + if (typeof object.destinations[i] === "number") { + message.destinations[i] = object.destinations[i]; + break; + } + case "CLIENT_LIBRARY_DESTINATION_UNSPECIFIED": + case 0: + message.destinations[i] = 0; + break; + case "GITHUB": + case 10: + message.destinations[i] = 10; + break; + case "PACKAGE_MANAGER": + case 20: + message.destinations[i] = 20; + break; + } + } + return message; + }; + + /** + * Creates a plain object from a CommonLanguageSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.CommonLanguageSettings + * @static + * @param {google.api.CommonLanguageSettings} message CommonLanguageSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CommonLanguageSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.destinations = []; + if (options.defaults) + object.referenceDocsUri = ""; + if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) + object.referenceDocsUri = message.referenceDocsUri; + if (message.destinations && message.destinations.length) { + object.destinations = []; + for (var j = 0; j < message.destinations.length; ++j) + object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j]; + } + return object; + }; + + /** + * Converts this CommonLanguageSettings to JSON. 
+ * @function toJSON + * @memberof google.api.CommonLanguageSettings + * @instance + * @returns {Object.} JSON object + */ + CommonLanguageSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CommonLanguageSettings + * @function getTypeUrl + * @memberof google.api.CommonLanguageSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CommonLanguageSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.CommonLanguageSettings"; + }; + + return CommonLanguageSettings; + })(); + + api.ClientLibrarySettings = (function() { + + /** + * Properties of a ClientLibrarySettings. + * @memberof google.api + * @interface IClientLibrarySettings + * @property {string|null} [version] ClientLibrarySettings version + * @property {google.api.LaunchStage|null} [launchStage] ClientLibrarySettings launchStage + * @property {boolean|null} [restNumericEnums] ClientLibrarySettings restNumericEnums + * @property {google.api.IJavaSettings|null} [javaSettings] ClientLibrarySettings javaSettings + * @property {google.api.ICppSettings|null} [cppSettings] ClientLibrarySettings cppSettings + * @property {google.api.IPhpSettings|null} [phpSettings] ClientLibrarySettings phpSettings + * @property {google.api.IPythonSettings|null} [pythonSettings] ClientLibrarySettings pythonSettings + * @property {google.api.INodeSettings|null} [nodeSettings] ClientLibrarySettings nodeSettings + * @property {google.api.IDotnetSettings|null} [dotnetSettings] ClientLibrarySettings dotnetSettings + * @property {google.api.IRubySettings|null} [rubySettings] ClientLibrarySettings rubySettings + * @property {google.api.IGoSettings|null} [goSettings] ClientLibrarySettings goSettings + 
*/ + + /** + * Constructs a new ClientLibrarySettings. + * @memberof google.api + * @classdesc Represents a ClientLibrarySettings. + * @implements IClientLibrarySettings + * @constructor + * @param {google.api.IClientLibrarySettings=} [properties] Properties to set + */ + function ClientLibrarySettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ClientLibrarySettings version. + * @member {string} version + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.version = ""; + + /** + * ClientLibrarySettings launchStage. + * @member {google.api.LaunchStage} launchStage + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.launchStage = 0; + + /** + * ClientLibrarySettings restNumericEnums. + * @member {boolean} restNumericEnums + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.restNumericEnums = false; + + /** + * ClientLibrarySettings javaSettings. + * @member {google.api.IJavaSettings|null|undefined} javaSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.javaSettings = null; + + /** + * ClientLibrarySettings cppSettings. + * @member {google.api.ICppSettings|null|undefined} cppSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.cppSettings = null; + + /** + * ClientLibrarySettings phpSettings. + * @member {google.api.IPhpSettings|null|undefined} phpSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.phpSettings = null; + + /** + * ClientLibrarySettings pythonSettings. 
+ * @member {google.api.IPythonSettings|null|undefined} pythonSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.pythonSettings = null; + + /** + * ClientLibrarySettings nodeSettings. + * @member {google.api.INodeSettings|null|undefined} nodeSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.nodeSettings = null; + + /** + * ClientLibrarySettings dotnetSettings. + * @member {google.api.IDotnetSettings|null|undefined} dotnetSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.dotnetSettings = null; + + /** + * ClientLibrarySettings rubySettings. + * @member {google.api.IRubySettings|null|undefined} rubySettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.rubySettings = null; + + /** + * ClientLibrarySettings goSettings. + * @member {google.api.IGoSettings|null|undefined} goSettings + * @memberof google.api.ClientLibrarySettings + * @instance + */ + ClientLibrarySettings.prototype.goSettings = null; + + /** + * Creates a new ClientLibrarySettings instance using the specified properties. + * @function create + * @memberof google.api.ClientLibrarySettings + * @static + * @param {google.api.IClientLibrarySettings=} [properties] Properties to set + * @returns {google.api.ClientLibrarySettings} ClientLibrarySettings instance + */ + ClientLibrarySettings.create = function create(properties) { + return new ClientLibrarySettings(properties); + }; + + /** + * Encodes the specified ClientLibrarySettings message. Does not implicitly {@link google.api.ClientLibrarySettings.verify|verify} messages. 
+ * @function encode + * @memberof google.api.ClientLibrarySettings + * @static + * @param {google.api.IClientLibrarySettings} message ClientLibrarySettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ClientLibrarySettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.version != null && Object.hasOwnProperty.call(message, "version")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.version); + if (message.launchStage != null && Object.hasOwnProperty.call(message, "launchStage")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.launchStage); + if (message.restNumericEnums != null && Object.hasOwnProperty.call(message, "restNumericEnums")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.restNumericEnums); + if (message.javaSettings != null && Object.hasOwnProperty.call(message, "javaSettings")) + $root.google.api.JavaSettings.encode(message.javaSettings, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); + if (message.cppSettings != null && Object.hasOwnProperty.call(message, "cppSettings")) + $root.google.api.CppSettings.encode(message.cppSettings, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim(); + if (message.phpSettings != null && Object.hasOwnProperty.call(message, "phpSettings")) + $root.google.api.PhpSettings.encode(message.phpSettings, writer.uint32(/* id 23, wireType 2 =*/186).fork()).ldelim(); + if (message.pythonSettings != null && Object.hasOwnProperty.call(message, "pythonSettings")) + $root.google.api.PythonSettings.encode(message.pythonSettings, writer.uint32(/* id 24, wireType 2 =*/194).fork()).ldelim(); + if (message.nodeSettings != null && Object.hasOwnProperty.call(message, "nodeSettings")) + $root.google.api.NodeSettings.encode(message.nodeSettings, writer.uint32(/* id 25, wireType 2 =*/202).fork()).ldelim(); + if (message.dotnetSettings != null && 
Object.hasOwnProperty.call(message, "dotnetSettings")) + $root.google.api.DotnetSettings.encode(message.dotnetSettings, writer.uint32(/* id 26, wireType 2 =*/210).fork()).ldelim(); + if (message.rubySettings != null && Object.hasOwnProperty.call(message, "rubySettings")) + $root.google.api.RubySettings.encode(message.rubySettings, writer.uint32(/* id 27, wireType 2 =*/218).fork()).ldelim(); + if (message.goSettings != null && Object.hasOwnProperty.call(message, "goSettings")) + $root.google.api.GoSettings.encode(message.goSettings, writer.uint32(/* id 28, wireType 2 =*/226).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ClientLibrarySettings message, length delimited. Does not implicitly {@link google.api.ClientLibrarySettings.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.ClientLibrarySettings + * @static + * @param {google.api.IClientLibrarySettings} message ClientLibrarySettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ClientLibrarySettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ClientLibrarySettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.ClientLibrarySettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.ClientLibrarySettings} ClientLibrarySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ClientLibrarySettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.ClientLibrarySettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.version = reader.string(); + break; + } + case 2: { + message.launchStage = reader.int32(); + break; + } + case 3: { + message.restNumericEnums = reader.bool(); + break; + } + case 21: { + message.javaSettings = $root.google.api.JavaSettings.decode(reader, reader.uint32()); + break; + } + case 22: { + message.cppSettings = $root.google.api.CppSettings.decode(reader, reader.uint32()); + break; + } + case 23: { + message.phpSettings = $root.google.api.PhpSettings.decode(reader, reader.uint32()); + break; + } + case 24: { + message.pythonSettings = $root.google.api.PythonSettings.decode(reader, reader.uint32()); + break; + } + case 25: { + message.nodeSettings = $root.google.api.NodeSettings.decode(reader, reader.uint32()); + break; + } + case 26: { + message.dotnetSettings = $root.google.api.DotnetSettings.decode(reader, reader.uint32()); + break; + } + case 27: { + message.rubySettings = $root.google.api.RubySettings.decode(reader, reader.uint32()); + break; + } + case 28: { + message.goSettings = $root.google.api.GoSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ClientLibrarySettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.ClientLibrarySettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.ClientLibrarySettings} ClientLibrarySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ClientLibrarySettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ClientLibrarySettings message. + * @function verify + * @memberof google.api.ClientLibrarySettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ClientLibrarySettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.version != null && message.hasOwnProperty("version")) + if (!$util.isString(message.version)) + return "version: string expected"; + if (message.launchStage != null && message.hasOwnProperty("launchStage")) + switch (message.launchStage) { + default: + return "launchStage: enum value expected"; + case 0: + case 6: + case 7: + case 1: + case 2: + case 3: + case 4: + case 5: + break; + } + if (message.restNumericEnums != null && message.hasOwnProperty("restNumericEnums")) + if (typeof message.restNumericEnums !== "boolean") + return "restNumericEnums: boolean expected"; + if (message.javaSettings != null && message.hasOwnProperty("javaSettings")) { + var error = $root.google.api.JavaSettings.verify(message.javaSettings); + if (error) + return "javaSettings." + error; + } + if (message.cppSettings != null && message.hasOwnProperty("cppSettings")) { + var error = $root.google.api.CppSettings.verify(message.cppSettings); + if (error) + return "cppSettings." 
+ error; + } + if (message.phpSettings != null && message.hasOwnProperty("phpSettings")) { + var error = $root.google.api.PhpSettings.verify(message.phpSettings); + if (error) + return "phpSettings." + error; + } + if (message.pythonSettings != null && message.hasOwnProperty("pythonSettings")) { + var error = $root.google.api.PythonSettings.verify(message.pythonSettings); + if (error) + return "pythonSettings." + error; + } + if (message.nodeSettings != null && message.hasOwnProperty("nodeSettings")) { + var error = $root.google.api.NodeSettings.verify(message.nodeSettings); + if (error) + return "nodeSettings." + error; + } + if (message.dotnetSettings != null && message.hasOwnProperty("dotnetSettings")) { + var error = $root.google.api.DotnetSettings.verify(message.dotnetSettings); + if (error) + return "dotnetSettings." + error; + } + if (message.rubySettings != null && message.hasOwnProperty("rubySettings")) { + var error = $root.google.api.RubySettings.verify(message.rubySettings); + if (error) + return "rubySettings." + error; + } + if (message.goSettings != null && message.hasOwnProperty("goSettings")) { + var error = $root.google.api.GoSettings.verify(message.goSettings); + if (error) + return "goSettings." + error; + } + return null; + }; + + /** + * Creates a ClientLibrarySettings message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.ClientLibrarySettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.ClientLibrarySettings} ClientLibrarySettings + */ + ClientLibrarySettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.ClientLibrarySettings) + return object; + var message = new $root.google.api.ClientLibrarySettings(); + if (object.version != null) + message.version = String(object.version); + switch (object.launchStage) { + default: + if (typeof object.launchStage === "number") { + message.launchStage = object.launchStage; + break; + } + break; + case "LAUNCH_STAGE_UNSPECIFIED": + case 0: + message.launchStage = 0; + break; + case "UNIMPLEMENTED": + case 6: + message.launchStage = 6; + break; + case "PRELAUNCH": + case 7: + message.launchStage = 7; + break; + case "EARLY_ACCESS": + case 1: + message.launchStage = 1; + break; + case "ALPHA": + case 2: + message.launchStage = 2; + break; + case "BETA": + case 3: + message.launchStage = 3; + break; + case "GA": + case 4: + message.launchStage = 4; + break; + case "DEPRECATED": + case 5: + message.launchStage = 5; + break; + } + if (object.restNumericEnums != null) + message.restNumericEnums = Boolean(object.restNumericEnums); + if (object.javaSettings != null) { + if (typeof object.javaSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.javaSettings: object expected"); + message.javaSettings = $root.google.api.JavaSettings.fromObject(object.javaSettings); + } + if (object.cppSettings != null) { + if (typeof object.cppSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.cppSettings: object expected"); + message.cppSettings = $root.google.api.CppSettings.fromObject(object.cppSettings); + } + if (object.phpSettings != null) { + if (typeof object.phpSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.phpSettings: object expected"); + message.phpSettings = 
$root.google.api.PhpSettings.fromObject(object.phpSettings); + } + if (object.pythonSettings != null) { + if (typeof object.pythonSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.pythonSettings: object expected"); + message.pythonSettings = $root.google.api.PythonSettings.fromObject(object.pythonSettings); + } + if (object.nodeSettings != null) { + if (typeof object.nodeSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.nodeSettings: object expected"); + message.nodeSettings = $root.google.api.NodeSettings.fromObject(object.nodeSettings); + } + if (object.dotnetSettings != null) { + if (typeof object.dotnetSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.dotnetSettings: object expected"); + message.dotnetSettings = $root.google.api.DotnetSettings.fromObject(object.dotnetSettings); + } + if (object.rubySettings != null) { + if (typeof object.rubySettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.rubySettings: object expected"); + message.rubySettings = $root.google.api.RubySettings.fromObject(object.rubySettings); + } + if (object.goSettings != null) { + if (typeof object.goSettings !== "object") + throw TypeError(".google.api.ClientLibrarySettings.goSettings: object expected"); + message.goSettings = $root.google.api.GoSettings.fromObject(object.goSettings); + } + return message; + }; + + /** + * Creates a plain object from a ClientLibrarySettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.ClientLibrarySettings + * @static + * @param {google.api.ClientLibrarySettings} message ClientLibrarySettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ClientLibrarySettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.version = ""; + object.launchStage = options.enums === String ? "LAUNCH_STAGE_UNSPECIFIED" : 0; + object.restNumericEnums = false; + object.javaSettings = null; + object.cppSettings = null; + object.phpSettings = null; + object.pythonSettings = null; + object.nodeSettings = null; + object.dotnetSettings = null; + object.rubySettings = null; + object.goSettings = null; + } + if (message.version != null && message.hasOwnProperty("version")) + object.version = message.version; + if (message.launchStage != null && message.hasOwnProperty("launchStage")) + object.launchStage = options.enums === String ? $root.google.api.LaunchStage[message.launchStage] === undefined ? 
message.launchStage : $root.google.api.LaunchStage[message.launchStage] : message.launchStage; + if (message.restNumericEnums != null && message.hasOwnProperty("restNumericEnums")) + object.restNumericEnums = message.restNumericEnums; + if (message.javaSettings != null && message.hasOwnProperty("javaSettings")) + object.javaSettings = $root.google.api.JavaSettings.toObject(message.javaSettings, options); + if (message.cppSettings != null && message.hasOwnProperty("cppSettings")) + object.cppSettings = $root.google.api.CppSettings.toObject(message.cppSettings, options); + if (message.phpSettings != null && message.hasOwnProperty("phpSettings")) + object.phpSettings = $root.google.api.PhpSettings.toObject(message.phpSettings, options); + if (message.pythonSettings != null && message.hasOwnProperty("pythonSettings")) + object.pythonSettings = $root.google.api.PythonSettings.toObject(message.pythonSettings, options); + if (message.nodeSettings != null && message.hasOwnProperty("nodeSettings")) + object.nodeSettings = $root.google.api.NodeSettings.toObject(message.nodeSettings, options); + if (message.dotnetSettings != null && message.hasOwnProperty("dotnetSettings")) + object.dotnetSettings = $root.google.api.DotnetSettings.toObject(message.dotnetSettings, options); + if (message.rubySettings != null && message.hasOwnProperty("rubySettings")) + object.rubySettings = $root.google.api.RubySettings.toObject(message.rubySettings, options); + if (message.goSettings != null && message.hasOwnProperty("goSettings")) + object.goSettings = $root.google.api.GoSettings.toObject(message.goSettings, options); + return object; + }; + + /** + * Converts this ClientLibrarySettings to JSON. 
+ * @function toJSON + * @memberof google.api.ClientLibrarySettings + * @instance + * @returns {Object.} JSON object + */ + ClientLibrarySettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ClientLibrarySettings + * @function getTypeUrl + * @memberof google.api.ClientLibrarySettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ClientLibrarySettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.ClientLibrarySettings"; + }; + + return ClientLibrarySettings; + })(); + + api.Publishing = (function() { + + /** + * Properties of a Publishing. + * @memberof google.api + * @interface IPublishing + * @property {Array.|null} [methodSettings] Publishing methodSettings + * @property {string|null} [newIssueUri] Publishing newIssueUri + * @property {string|null} [documentationUri] Publishing documentationUri + * @property {string|null} [apiShortName] Publishing apiShortName + * @property {string|null} [githubLabel] Publishing githubLabel + * @property {Array.|null} [codeownerGithubTeams] Publishing codeownerGithubTeams + * @property {string|null} [docTagPrefix] Publishing docTagPrefix + * @property {google.api.ClientLibraryOrganization|null} [organization] Publishing organization + * @property {Array.|null} [librarySettings] Publishing librarySettings + */ + + /** + * Constructs a new Publishing. + * @memberof google.api + * @classdesc Represents a Publishing. 
+ * @implements IPublishing + * @constructor + * @param {google.api.IPublishing=} [properties] Properties to set + */ + function Publishing(properties) { + this.methodSettings = []; + this.codeownerGithubTeams = []; + this.librarySettings = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Publishing methodSettings. + * @member {Array.} methodSettings + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.methodSettings = $util.emptyArray; + + /** + * Publishing newIssueUri. + * @member {string} newIssueUri + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.newIssueUri = ""; + + /** + * Publishing documentationUri. + * @member {string} documentationUri + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.documentationUri = ""; + + /** + * Publishing apiShortName. + * @member {string} apiShortName + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.apiShortName = ""; + + /** + * Publishing githubLabel. + * @member {string} githubLabel + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.githubLabel = ""; + + /** + * Publishing codeownerGithubTeams. + * @member {Array.} codeownerGithubTeams + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.codeownerGithubTeams = $util.emptyArray; + + /** + * Publishing docTagPrefix. + * @member {string} docTagPrefix + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.docTagPrefix = ""; + + /** + * Publishing organization. + * @member {google.api.ClientLibraryOrganization} organization + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.organization = 0; + + /** + * Publishing librarySettings. 
+ * @member {Array.} librarySettings + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.librarySettings = $util.emptyArray; + + /** + * Creates a new Publishing instance using the specified properties. + * @function create + * @memberof google.api.Publishing + * @static + * @param {google.api.IPublishing=} [properties] Properties to set + * @returns {google.api.Publishing} Publishing instance + */ + Publishing.create = function create(properties) { + return new Publishing(properties); + }; + + /** + * Encodes the specified Publishing message. Does not implicitly {@link google.api.Publishing.verify|verify} messages. + * @function encode + * @memberof google.api.Publishing + * @static + * @param {google.api.IPublishing} message Publishing message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Publishing.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.methodSettings != null && message.methodSettings.length) + for (var i = 0; i < message.methodSettings.length; ++i) + $root.google.api.MethodSettings.encode(message.methodSettings[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.newIssueUri != null && Object.hasOwnProperty.call(message, "newIssueUri")) + writer.uint32(/* id 101, wireType 2 =*/810).string(message.newIssueUri); + if (message.documentationUri != null && Object.hasOwnProperty.call(message, "documentationUri")) + writer.uint32(/* id 102, wireType 2 =*/818).string(message.documentationUri); + if (message.apiShortName != null && Object.hasOwnProperty.call(message, "apiShortName")) + writer.uint32(/* id 103, wireType 2 =*/826).string(message.apiShortName); + if (message.githubLabel != null && Object.hasOwnProperty.call(message, "githubLabel")) + writer.uint32(/* id 104, wireType 2 =*/834).string(message.githubLabel); + if (message.codeownerGithubTeams != null && 
message.codeownerGithubTeams.length) + for (var i = 0; i < message.codeownerGithubTeams.length; ++i) + writer.uint32(/* id 105, wireType 2 =*/842).string(message.codeownerGithubTeams[i]); + if (message.docTagPrefix != null && Object.hasOwnProperty.call(message, "docTagPrefix")) + writer.uint32(/* id 106, wireType 2 =*/850).string(message.docTagPrefix); + if (message.organization != null && Object.hasOwnProperty.call(message, "organization")) + writer.uint32(/* id 107, wireType 0 =*/856).int32(message.organization); + if (message.librarySettings != null && message.librarySettings.length) + for (var i = 0; i < message.librarySettings.length; ++i) + $root.google.api.ClientLibrarySettings.encode(message.librarySettings[i], writer.uint32(/* id 109, wireType 2 =*/874).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Publishing message, length delimited. Does not implicitly {@link google.api.Publishing.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.Publishing + * @static + * @param {google.api.IPublishing} message Publishing message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Publishing.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Publishing message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.Publishing + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.Publishing} Publishing + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Publishing.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Publishing(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (!(message.methodSettings && message.methodSettings.length)) + message.methodSettings = []; + message.methodSettings.push($root.google.api.MethodSettings.decode(reader, reader.uint32())); + break; + } + case 101: { + message.newIssueUri = reader.string(); + break; + } + case 102: { + message.documentationUri = reader.string(); + break; + } + case 103: { + message.apiShortName = reader.string(); + break; + } + case 104: { + message.githubLabel = reader.string(); + break; + } + case 105: { + if (!(message.codeownerGithubTeams && message.codeownerGithubTeams.length)) + message.codeownerGithubTeams = []; + message.codeownerGithubTeams.push(reader.string()); + break; + } + case 106: { + message.docTagPrefix = reader.string(); + break; + } + case 107: { + message.organization = reader.int32(); + break; + } + case 109: { + if (!(message.librarySettings && message.librarySettings.length)) + message.librarySettings = []; + message.librarySettings.push($root.google.api.ClientLibrarySettings.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Publishing message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.Publishing + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.Publishing} Publishing + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Publishing.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Publishing message. + * @function verify + * @memberof google.api.Publishing + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Publishing.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.methodSettings != null && message.hasOwnProperty("methodSettings")) { + if (!Array.isArray(message.methodSettings)) + return "methodSettings: array expected"; + for (var i = 0; i < message.methodSettings.length; ++i) { + var error = $root.google.api.MethodSettings.verify(message.methodSettings[i]); + if (error) + return "methodSettings." 
+ error; + } + } + if (message.newIssueUri != null && message.hasOwnProperty("newIssueUri")) + if (!$util.isString(message.newIssueUri)) + return "newIssueUri: string expected"; + if (message.documentationUri != null && message.hasOwnProperty("documentationUri")) + if (!$util.isString(message.documentationUri)) + return "documentationUri: string expected"; + if (message.apiShortName != null && message.hasOwnProperty("apiShortName")) + if (!$util.isString(message.apiShortName)) + return "apiShortName: string expected"; + if (message.githubLabel != null && message.hasOwnProperty("githubLabel")) + if (!$util.isString(message.githubLabel)) + return "githubLabel: string expected"; + if (message.codeownerGithubTeams != null && message.hasOwnProperty("codeownerGithubTeams")) { + if (!Array.isArray(message.codeownerGithubTeams)) + return "codeownerGithubTeams: array expected"; + for (var i = 0; i < message.codeownerGithubTeams.length; ++i) + if (!$util.isString(message.codeownerGithubTeams[i])) + return "codeownerGithubTeams: string[] expected"; + } + if (message.docTagPrefix != null && message.hasOwnProperty("docTagPrefix")) + if (!$util.isString(message.docTagPrefix)) + return "docTagPrefix: string expected"; + if (message.organization != null && message.hasOwnProperty("organization")) + switch (message.organization) { + default: + return "organization: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + break; + } + if (message.librarySettings != null && message.hasOwnProperty("librarySettings")) { + if (!Array.isArray(message.librarySettings)) + return "librarySettings: array expected"; + for (var i = 0; i < message.librarySettings.length; ++i) { + var error = $root.google.api.ClientLibrarySettings.verify(message.librarySettings[i]); + if (error) + return "librarySettings." + error; + } + } + return null; + }; + + /** + * Creates a Publishing message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.Publishing + * @static + * @param {Object.} object Plain object + * @returns {google.api.Publishing} Publishing + */ + Publishing.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.Publishing) + return object; + var message = new $root.google.api.Publishing(); + if (object.methodSettings) { + if (!Array.isArray(object.methodSettings)) + throw TypeError(".google.api.Publishing.methodSettings: array expected"); + message.methodSettings = []; + for (var i = 0; i < object.methodSettings.length; ++i) { + if (typeof object.methodSettings[i] !== "object") + throw TypeError(".google.api.Publishing.methodSettings: object expected"); + message.methodSettings[i] = $root.google.api.MethodSettings.fromObject(object.methodSettings[i]); + } + } + if (object.newIssueUri != null) + message.newIssueUri = String(object.newIssueUri); + if (object.documentationUri != null) + message.documentationUri = String(object.documentationUri); + if (object.apiShortName != null) + message.apiShortName = String(object.apiShortName); + if (object.githubLabel != null) + message.githubLabel = String(object.githubLabel); + if (object.codeownerGithubTeams) { + if (!Array.isArray(object.codeownerGithubTeams)) + throw TypeError(".google.api.Publishing.codeownerGithubTeams: array expected"); + message.codeownerGithubTeams = []; + for (var i = 0; i < object.codeownerGithubTeams.length; ++i) + message.codeownerGithubTeams[i] = String(object.codeownerGithubTeams[i]); + } + if (object.docTagPrefix != null) + message.docTagPrefix = String(object.docTagPrefix); + switch (object.organization) { + default: + if (typeof object.organization === "number") { + message.organization = object.organization; + break; + } + break; + case "CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED": + case 0: + message.organization = 0; + break; + case "CLOUD": + case 1: + message.organization = 1; + break; + case "ADS": + case 2: + message.organization = 2; + 
break; + case "PHOTOS": + case 3: + message.organization = 3; + break; + case "STREET_VIEW": + case 4: + message.organization = 4; + break; + } + if (object.librarySettings) { + if (!Array.isArray(object.librarySettings)) + throw TypeError(".google.api.Publishing.librarySettings: array expected"); + message.librarySettings = []; + for (var i = 0; i < object.librarySettings.length; ++i) { + if (typeof object.librarySettings[i] !== "object") + throw TypeError(".google.api.Publishing.librarySettings: object expected"); + message.librarySettings[i] = $root.google.api.ClientLibrarySettings.fromObject(object.librarySettings[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a Publishing message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.Publishing + * @static + * @param {google.api.Publishing} message Publishing + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Publishing.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.methodSettings = []; + object.codeownerGithubTeams = []; + object.librarySettings = []; + } + if (options.defaults) { + object.newIssueUri = ""; + object.documentationUri = ""; + object.apiShortName = ""; + object.githubLabel = ""; + object.docTagPrefix = ""; + object.organization = options.enums === String ? 
"CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED" : 0; + } + if (message.methodSettings && message.methodSettings.length) { + object.methodSettings = []; + for (var j = 0; j < message.methodSettings.length; ++j) + object.methodSettings[j] = $root.google.api.MethodSettings.toObject(message.methodSettings[j], options); + } + if (message.newIssueUri != null && message.hasOwnProperty("newIssueUri")) + object.newIssueUri = message.newIssueUri; + if (message.documentationUri != null && message.hasOwnProperty("documentationUri")) + object.documentationUri = message.documentationUri; + if (message.apiShortName != null && message.hasOwnProperty("apiShortName")) + object.apiShortName = message.apiShortName; + if (message.githubLabel != null && message.hasOwnProperty("githubLabel")) + object.githubLabel = message.githubLabel; + if (message.codeownerGithubTeams && message.codeownerGithubTeams.length) { + object.codeownerGithubTeams = []; + for (var j = 0; j < message.codeownerGithubTeams.length; ++j) + object.codeownerGithubTeams[j] = message.codeownerGithubTeams[j]; + } + if (message.docTagPrefix != null && message.hasOwnProperty("docTagPrefix")) + object.docTagPrefix = message.docTagPrefix; + if (message.organization != null && message.hasOwnProperty("organization")) + object.organization = options.enums === String ? $root.google.api.ClientLibraryOrganization[message.organization] === undefined ? message.organization : $root.google.api.ClientLibraryOrganization[message.organization] : message.organization; + if (message.librarySettings && message.librarySettings.length) { + object.librarySettings = []; + for (var j = 0; j < message.librarySettings.length; ++j) + object.librarySettings[j] = $root.google.api.ClientLibrarySettings.toObject(message.librarySettings[j], options); + } + return object; + }; + + /** + * Converts this Publishing to JSON. 
+ * @function toJSON + * @memberof google.api.Publishing + * @instance + * @returns {Object.} JSON object + */ + Publishing.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Publishing + * @function getTypeUrl + * @memberof google.api.Publishing + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Publishing.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.Publishing"; + }; + + return Publishing; + })(); + + api.JavaSettings = (function() { + + /** + * Properties of a JavaSettings. + * @memberof google.api + * @interface IJavaSettings + * @property {string|null} [libraryPackage] JavaSettings libraryPackage + * @property {Object.|null} [serviceClassNames] JavaSettings serviceClassNames + * @property {google.api.ICommonLanguageSettings|null} [common] JavaSettings common + */ + + /** + * Constructs a new JavaSettings. + * @memberof google.api + * @classdesc Represents a JavaSettings. + * @implements IJavaSettings + * @constructor + * @param {google.api.IJavaSettings=} [properties] Properties to set + */ + function JavaSettings(properties) { + this.serviceClassNames = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * JavaSettings libraryPackage. + * @member {string} libraryPackage + * @memberof google.api.JavaSettings + * @instance + */ + JavaSettings.prototype.libraryPackage = ""; + + /** + * JavaSettings serviceClassNames. 
+ * @member {Object.} serviceClassNames + * @memberof google.api.JavaSettings + * @instance + */ + JavaSettings.prototype.serviceClassNames = $util.emptyObject; + + /** + * JavaSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.JavaSettings + * @instance + */ + JavaSettings.prototype.common = null; + + /** + * Creates a new JavaSettings instance using the specified properties. + * @function create + * @memberof google.api.JavaSettings + * @static + * @param {google.api.IJavaSettings=} [properties] Properties to set + * @returns {google.api.JavaSettings} JavaSettings instance + */ + JavaSettings.create = function create(properties) { + return new JavaSettings(properties); + }; + + /** + * Encodes the specified JavaSettings message. Does not implicitly {@link google.api.JavaSettings.verify|verify} messages. + * @function encode + * @memberof google.api.JavaSettings + * @static + * @param {google.api.IJavaSettings} message JavaSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + JavaSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.libraryPackage != null && Object.hasOwnProperty.call(message, "libraryPackage")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.libraryPackage); + if (message.serviceClassNames != null && Object.hasOwnProperty.call(message, "serviceClassNames")) + for (var keys = Object.keys(message.serviceClassNames), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.serviceClassNames[keys[i]]).ldelim(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + 
return writer; + }; + + /** + * Encodes the specified JavaSettings message, length delimited. Does not implicitly {@link google.api.JavaSettings.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.JavaSettings + * @static + * @param {google.api.IJavaSettings} message JavaSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + JavaSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a JavaSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.JavaSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.JavaSettings} JavaSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + JavaSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.JavaSettings(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.libraryPackage = reader.string(); + break; + } + case 2: { + if (message.serviceClassNames === $util.emptyObject) + message.serviceClassNames = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.serviceClassNames[key] = value; + break; + } + case 3: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a JavaSettings message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.JavaSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.JavaSettings} JavaSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + JavaSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a JavaSettings message. 
+ * @function verify + * @memberof google.api.JavaSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + JavaSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.libraryPackage != null && message.hasOwnProperty("libraryPackage")) + if (!$util.isString(message.libraryPackage)) + return "libraryPackage: string expected"; + if (message.serviceClassNames != null && message.hasOwnProperty("serviceClassNames")) { + if (!$util.isObject(message.serviceClassNames)) + return "serviceClassNames: object expected"; + var key = Object.keys(message.serviceClassNames); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.serviceClassNames[key[i]])) + return "serviceClassNames: string{k:string} expected"; + } + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a JavaSettings message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.JavaSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.JavaSettings} JavaSettings + */ + JavaSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.JavaSettings) + return object; + var message = new $root.google.api.JavaSettings(); + if (object.libraryPackage != null) + message.libraryPackage = String(object.libraryPackage); + if (object.serviceClassNames) { + if (typeof object.serviceClassNames !== "object") + throw TypeError(".google.api.JavaSettings.serviceClassNames: object expected"); + message.serviceClassNames = {}; + for (var keys = Object.keys(object.serviceClassNames), i = 0; i < keys.length; ++i) + message.serviceClassNames[keys[i]] = String(object.serviceClassNames[keys[i]]); + } + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.JavaSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a JavaSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.JavaSettings + * @static + * @param {google.api.JavaSettings} message JavaSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + JavaSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.serviceClassNames = {}; + if (options.defaults) { + object.libraryPackage = ""; + object.common = null; + } + if (message.libraryPackage != null && message.hasOwnProperty("libraryPackage")) + object.libraryPackage = message.libraryPackage; + var keys2; + if (message.serviceClassNames && (keys2 = Object.keys(message.serviceClassNames)).length) { + object.serviceClassNames = {}; + for (var j = 0; j < keys2.length; ++j) + object.serviceClassNames[keys2[j]] = message.serviceClassNames[keys2[j]]; + } + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this JavaSettings to JSON. + * @function toJSON + * @memberof google.api.JavaSettings + * @instance + * @returns {Object.} JSON object + */ + JavaSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for JavaSettings + * @function getTypeUrl + * @memberof google.api.JavaSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + JavaSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.JavaSettings"; + }; + + return JavaSettings; + })(); + + api.CppSettings = (function() { + + /** + * Properties of a CppSettings. 
+ * @memberof google.api + * @interface ICppSettings + * @property {google.api.ICommonLanguageSettings|null} [common] CppSettings common + */ + + /** + * Constructs a new CppSettings. + * @memberof google.api + * @classdesc Represents a CppSettings. + * @implements ICppSettings + * @constructor + * @param {google.api.ICppSettings=} [properties] Properties to set + */ + function CppSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CppSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.CppSettings + * @instance + */ + CppSettings.prototype.common = null; + + /** + * Creates a new CppSettings instance using the specified properties. + * @function create + * @memberof google.api.CppSettings + * @static + * @param {google.api.ICppSettings=} [properties] Properties to set + * @returns {google.api.CppSettings} CppSettings instance + */ + CppSettings.create = function create(properties) { + return new CppSettings(properties); + }; + + /** + * Encodes the specified CppSettings message. Does not implicitly {@link google.api.CppSettings.verify|verify} messages. + * @function encode + * @memberof google.api.CppSettings + * @static + * @param {google.api.ICppSettings} message CppSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CppSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified CppSettings message, length delimited. 
Does not implicitly {@link google.api.CppSettings.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.CppSettings + * @static + * @param {google.api.ICppSettings} message CppSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CppSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CppSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.CppSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.CppSettings} CppSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CppSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CppSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CppSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.CppSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.CppSettings} CppSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CppSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CppSettings message. + * @function verify + * @memberof google.api.CppSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CppSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a CppSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.CppSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.CppSettings} CppSettings + */ + CppSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.CppSettings) + return object; + var message = new $root.google.api.CppSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.CppSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a CppSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.CppSettings + * @static + * @param {google.api.CppSettings} message CppSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CppSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this CppSettings to JSON. + * @function toJSON + * @memberof google.api.CppSettings + * @instance + * @returns {Object.} JSON object + */ + CppSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CppSettings + * @function getTypeUrl + * @memberof google.api.CppSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CppSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.CppSettings"; + }; + + return CppSettings; + })(); + + api.PhpSettings = (function() { + + /** + * Properties of a PhpSettings. + * @memberof google.api + * @interface IPhpSettings + * @property {google.api.ICommonLanguageSettings|null} [common] PhpSettings common + */ + + /** + * Constructs a new PhpSettings. + * @memberof google.api + * @classdesc Represents a PhpSettings. 
+ * @implements IPhpSettings + * @constructor + * @param {google.api.IPhpSettings=} [properties] Properties to set + */ + function PhpSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PhpSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.PhpSettings + * @instance + */ + PhpSettings.prototype.common = null; + + /** + * Creates a new PhpSettings instance using the specified properties. + * @function create + * @memberof google.api.PhpSettings + * @static + * @param {google.api.IPhpSettings=} [properties] Properties to set + * @returns {google.api.PhpSettings} PhpSettings instance + */ + PhpSettings.create = function create(properties) { + return new PhpSettings(properties); + }; + + /** + * Encodes the specified PhpSettings message. Does not implicitly {@link google.api.PhpSettings.verify|verify} messages. + * @function encode + * @memberof google.api.PhpSettings + * @static + * @param {google.api.IPhpSettings} message PhpSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PhpSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified PhpSettings message, length delimited. Does not implicitly {@link google.api.PhpSettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.PhpSettings + * @static + * @param {google.api.IPhpSettings} message PhpSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PhpSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PhpSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.PhpSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.PhpSettings} PhpSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PhpSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PhpSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PhpSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.PhpSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.PhpSettings} PhpSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PhpSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PhpSettings message. + * @function verify + * @memberof google.api.PhpSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PhpSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a PhpSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.PhpSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.PhpSettings} PhpSettings + */ + PhpSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.PhpSettings) + return object; + var message = new $root.google.api.PhpSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.PhpSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a PhpSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.PhpSettings + * @static + * @param {google.api.PhpSettings} message PhpSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PhpSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this PhpSettings to JSON. + * @function toJSON + * @memberof google.api.PhpSettings + * @instance + * @returns {Object.} JSON object + */ + PhpSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PhpSettings + * @function getTypeUrl + * @memberof google.api.PhpSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PhpSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PhpSettings"; + }; + + return PhpSettings; + })(); + + api.PythonSettings = (function() { + + /** + * Properties of a PythonSettings. + * @memberof google.api + * @interface IPythonSettings + * @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common + */ + + /** + * Constructs a new PythonSettings. + * @memberof google.api + * @classdesc Represents a PythonSettings. 
+ * @implements IPythonSettings + * @constructor + * @param {google.api.IPythonSettings=} [properties] Properties to set + */ + function PythonSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PythonSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.PythonSettings + * @instance + */ + PythonSettings.prototype.common = null; + + /** + * Creates a new PythonSettings instance using the specified properties. + * @function create + * @memberof google.api.PythonSettings + * @static + * @param {google.api.IPythonSettings=} [properties] Properties to set + * @returns {google.api.PythonSettings} PythonSettings instance + */ + PythonSettings.create = function create(properties) { + return new PythonSettings(properties); + }; + + /** + * Encodes the specified PythonSettings message. Does not implicitly {@link google.api.PythonSettings.verify|verify} messages. + * @function encode + * @memberof google.api.PythonSettings + * @static + * @param {google.api.IPythonSettings} message PythonSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PythonSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified PythonSettings message, length delimited. Does not implicitly {@link google.api.PythonSettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.PythonSettings + * @static + * @param {google.api.IPythonSettings} message PythonSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PythonSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PythonSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.PythonSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.PythonSettings} PythonSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PythonSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PythonSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.PythonSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.PythonSettings} PythonSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PythonSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PythonSettings message. + * @function verify + * @memberof google.api.PythonSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PythonSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a PythonSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.PythonSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.PythonSettings} PythonSettings + */ + PythonSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.PythonSettings) + return object; + var message = new $root.google.api.PythonSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.PythonSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a PythonSettings message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.api.PythonSettings + * @static + * @param {google.api.PythonSettings} message PythonSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PythonSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this PythonSettings to JSON. + * @function toJSON + * @memberof google.api.PythonSettings + * @instance + * @returns {Object.} JSON object + */ + PythonSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PythonSettings + * @function getTypeUrl + * @memberof google.api.PythonSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PythonSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PythonSettings"; + }; + + return PythonSettings; + })(); + + api.NodeSettings = (function() { + + /** + * Properties of a NodeSettings. + * @memberof google.api + * @interface INodeSettings + * @property {google.api.ICommonLanguageSettings|null} [common] NodeSettings common + */ + + /** + * Constructs a new NodeSettings. + * @memberof google.api + * @classdesc Represents a NodeSettings. 
+ * @implements INodeSettings + * @constructor + * @param {google.api.INodeSettings=} [properties] Properties to set + */ + function NodeSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * NodeSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.NodeSettings + * @instance + */ + NodeSettings.prototype.common = null; + + /** + * Creates a new NodeSettings instance using the specified properties. + * @function create + * @memberof google.api.NodeSettings + * @static + * @param {google.api.INodeSettings=} [properties] Properties to set + * @returns {google.api.NodeSettings} NodeSettings instance + */ + NodeSettings.create = function create(properties) { + return new NodeSettings(properties); + }; + + /** + * Encodes the specified NodeSettings message. Does not implicitly {@link google.api.NodeSettings.verify|verify} messages. + * @function encode + * @memberof google.api.NodeSettings + * @static + * @param {google.api.INodeSettings} message NodeSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NodeSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified NodeSettings message, length delimited. Does not implicitly {@link google.api.NodeSettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.NodeSettings + * @static + * @param {google.api.INodeSettings} message NodeSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + NodeSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a NodeSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.NodeSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.NodeSettings} NodeSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NodeSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.NodeSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a NodeSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.NodeSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.NodeSettings} NodeSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NodeSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a NodeSettings message. + * @function verify + * @memberof google.api.NodeSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + NodeSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a NodeSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.NodeSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.NodeSettings} NodeSettings + */ + NodeSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.NodeSettings) + return object; + var message = new $root.google.api.NodeSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.NodeSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a NodeSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.NodeSettings + * @static + * @param {google.api.NodeSettings} message NodeSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + NodeSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this NodeSettings to JSON. + * @function toJSON + * @memberof google.api.NodeSettings + * @instance + * @returns {Object.} JSON object + */ + NodeSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for NodeSettings + * @function getTypeUrl + * @memberof google.api.NodeSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + NodeSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.NodeSettings"; + }; + + return NodeSettings; + })(); + + api.DotnetSettings = (function() { + + /** + * Properties of a DotnetSettings. + * @memberof google.api + * @interface IDotnetSettings + * @property {google.api.ICommonLanguageSettings|null} [common] DotnetSettings common + */ + + /** + * Constructs a new DotnetSettings. + * @memberof google.api + * @classdesc Represents a DotnetSettings. 
+ * @implements IDotnetSettings + * @constructor + * @param {google.api.IDotnetSettings=} [properties] Properties to set + */ + function DotnetSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DotnetSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.common = null; + + /** + * Creates a new DotnetSettings instance using the specified properties. + * @function create + * @memberof google.api.DotnetSettings + * @static + * @param {google.api.IDotnetSettings=} [properties] Properties to set + * @returns {google.api.DotnetSettings} DotnetSettings instance + */ + DotnetSettings.create = function create(properties) { + return new DotnetSettings(properties); + }; + + /** + * Encodes the specified DotnetSettings message. Does not implicitly {@link google.api.DotnetSettings.verify|verify} messages. + * @function encode + * @memberof google.api.DotnetSettings + * @static + * @param {google.api.IDotnetSettings} message DotnetSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DotnetSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified DotnetSettings message, length delimited. Does not implicitly {@link google.api.DotnetSettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.DotnetSettings + * @static + * @param {google.api.IDotnetSettings} message DotnetSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DotnetSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DotnetSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.DotnetSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.DotnetSettings} DotnetSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DotnetSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.DotnetSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DotnetSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.DotnetSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.DotnetSettings} DotnetSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DotnetSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DotnetSettings message. + * @function verify + * @memberof google.api.DotnetSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DotnetSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a DotnetSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.DotnetSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.DotnetSettings} DotnetSettings + */ + DotnetSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.DotnetSettings) + return object; + var message = new $root.google.api.DotnetSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.DotnetSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a DotnetSettings message. 
Also converts values to other types if specified. + * @function toObject + * @memberof google.api.DotnetSettings + * @static + * @param {google.api.DotnetSettings} message DotnetSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DotnetSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this DotnetSettings to JSON. + * @function toJSON + * @memberof google.api.DotnetSettings + * @instance + * @returns {Object.} JSON object + */ + DotnetSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DotnetSettings + * @function getTypeUrl + * @memberof google.api.DotnetSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DotnetSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.DotnetSettings"; + }; + + return DotnetSettings; + })(); + + api.RubySettings = (function() { + + /** + * Properties of a RubySettings. + * @memberof google.api + * @interface IRubySettings + * @property {google.api.ICommonLanguageSettings|null} [common] RubySettings common + */ + + /** + * Constructs a new RubySettings. + * @memberof google.api + * @classdesc Represents a RubySettings. 
+ * @implements IRubySettings + * @constructor + * @param {google.api.IRubySettings=} [properties] Properties to set + */ + function RubySettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * RubySettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.RubySettings + * @instance + */ + RubySettings.prototype.common = null; + + /** + * Creates a new RubySettings instance using the specified properties. + * @function create + * @memberof google.api.RubySettings + * @static + * @param {google.api.IRubySettings=} [properties] Properties to set + * @returns {google.api.RubySettings} RubySettings instance + */ + RubySettings.create = function create(properties) { + return new RubySettings(properties); + }; + + /** + * Encodes the specified RubySettings message. Does not implicitly {@link google.api.RubySettings.verify|verify} messages. + * @function encode + * @memberof google.api.RubySettings + * @static + * @param {google.api.IRubySettings} message RubySettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RubySettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified RubySettings message, length delimited. Does not implicitly {@link google.api.RubySettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.RubySettings + * @static + * @param {google.api.IRubySettings} message RubySettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RubySettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a RubySettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.RubySettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.RubySettings} RubySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RubySettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.RubySettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a RubySettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.RubySettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.RubySettings} RubySettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RubySettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a RubySettings message. + * @function verify + * @memberof google.api.RubySettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + RubySettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a RubySettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.RubySettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.RubySettings} RubySettings + */ + RubySettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.RubySettings) + return object; + var message = new $root.google.api.RubySettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.RubySettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a RubySettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.RubySettings + * @static + * @param {google.api.RubySettings} message RubySettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + RubySettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this RubySettings to JSON. + * @function toJSON + * @memberof google.api.RubySettings + * @instance + * @returns {Object.} JSON object + */ + RubySettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for RubySettings + * @function getTypeUrl + * @memberof google.api.RubySettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + RubySettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.RubySettings"; + }; + + return RubySettings; + })(); + + api.GoSettings = (function() { + + /** + * Properties of a GoSettings. + * @memberof google.api + * @interface IGoSettings + * @property {google.api.ICommonLanguageSettings|null} [common] GoSettings common + */ + + /** + * Constructs a new GoSettings. + * @memberof google.api + * @classdesc Represents a GoSettings. 
+ * @implements IGoSettings + * @constructor + * @param {google.api.IGoSettings=} [properties] Properties to set + */ + function GoSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GoSettings common. + * @member {google.api.ICommonLanguageSettings|null|undefined} common + * @memberof google.api.GoSettings + * @instance + */ + GoSettings.prototype.common = null; + + /** + * Creates a new GoSettings instance using the specified properties. + * @function create + * @memberof google.api.GoSettings + * @static + * @param {google.api.IGoSettings=} [properties] Properties to set + * @returns {google.api.GoSettings} GoSettings instance + */ + GoSettings.create = function create(properties) { + return new GoSettings(properties); + }; + + /** + * Encodes the specified GoSettings message. Does not implicitly {@link google.api.GoSettings.verify|verify} messages. + * @function encode + * @memberof google.api.GoSettings + * @static + * @param {google.api.IGoSettings} message GoSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GoSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.common != null && Object.hasOwnProperty.call(message, "common")) + $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified GoSettings message, length delimited. Does not implicitly {@link google.api.GoSettings.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.GoSettings + * @static + * @param {google.api.IGoSettings} message GoSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GoSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a GoSettings message from the specified reader or buffer. + * @function decode + * @memberof google.api.GoSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.GoSettings} GoSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GoSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a GoSettings message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.GoSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.GoSettings} GoSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GoSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a GoSettings message. + * @function verify + * @memberof google.api.GoSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GoSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.common != null && message.hasOwnProperty("common")) { + var error = $root.google.api.CommonLanguageSettings.verify(message.common); + if (error) + return "common." + error; + } + return null; + }; + + /** + * Creates a GoSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.GoSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.GoSettings} GoSettings + */ + GoSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.GoSettings) + return object; + var message = new $root.google.api.GoSettings(); + if (object.common != null) { + if (typeof object.common !== "object") + throw TypeError(".google.api.GoSettings.common: object expected"); + message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); + } + return message; + }; + + /** + * Creates a plain object from a GoSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.GoSettings + * @static + * @param {google.api.GoSettings} message GoSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + GoSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.common = null; + if (message.common != null && message.hasOwnProperty("common")) + object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + return object; + }; + + /** + * Converts this GoSettings to JSON. + * @function toJSON + * @memberof google.api.GoSettings + * @instance + * @returns {Object.} JSON object + */ + GoSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for GoSettings + * @function getTypeUrl + * @memberof google.api.GoSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + GoSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.GoSettings"; + }; + + return GoSettings; + })(); + + api.MethodSettings = (function() { + + /** + * Properties of a MethodSettings. + * @memberof google.api + * @interface IMethodSettings + * @property {string|null} [selector] MethodSettings selector + * @property {google.api.MethodSettings.ILongRunning|null} [longRunning] MethodSettings longRunning + */ + + /** + * Constructs a new MethodSettings. + * @memberof google.api + * @classdesc Represents a MethodSettings. 
+ * @implements IMethodSettings + * @constructor + * @param {google.api.IMethodSettings=} [properties] Properties to set + */ + function MethodSettings(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MethodSettings selector. + * @member {string} selector + * @memberof google.api.MethodSettings + * @instance + */ + MethodSettings.prototype.selector = ""; + + /** + * MethodSettings longRunning. + * @member {google.api.MethodSettings.ILongRunning|null|undefined} longRunning + * @memberof google.api.MethodSettings + * @instance + */ + MethodSettings.prototype.longRunning = null; + + /** + * Creates a new MethodSettings instance using the specified properties. + * @function create + * @memberof google.api.MethodSettings + * @static + * @param {google.api.IMethodSettings=} [properties] Properties to set + * @returns {google.api.MethodSettings} MethodSettings instance + */ + MethodSettings.create = function create(properties) { + return new MethodSettings(properties); + }; + + /** + * Encodes the specified MethodSettings message. Does not implicitly {@link google.api.MethodSettings.verify|verify} messages. 
+ * @function encode + * @memberof google.api.MethodSettings + * @static + * @param {google.api.IMethodSettings} message MethodSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodSettings.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); + if (message.longRunning != null && Object.hasOwnProperty.call(message, "longRunning")) + $root.google.api.MethodSettings.LongRunning.encode(message.longRunning, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MethodSettings message, length delimited. Does not implicitly {@link google.api.MethodSettings.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.MethodSettings + * @static + * @param {google.api.IMethodSettings} message MethodSettings message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MethodSettings.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MethodSettings message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.MethodSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.MethodSettings} MethodSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodSettings.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.MethodSettings(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.selector = reader.string(); + break; + } + case 2: { + message.longRunning = $root.google.api.MethodSettings.LongRunning.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MethodSettings message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.MethodSettings + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.MethodSettings} MethodSettings + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MethodSettings.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MethodSettings message. 
+ * @function verify + * @memberof google.api.MethodSettings + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MethodSettings.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.selector != null && message.hasOwnProperty("selector")) + if (!$util.isString(message.selector)) + return "selector: string expected"; + if (message.longRunning != null && message.hasOwnProperty("longRunning")) { + var error = $root.google.api.MethodSettings.LongRunning.verify(message.longRunning); + if (error) + return "longRunning." + error; + } + return null; + }; + + /** + * Creates a MethodSettings message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.MethodSettings + * @static + * @param {Object.} object Plain object + * @returns {google.api.MethodSettings} MethodSettings + */ + MethodSettings.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.MethodSettings) + return object; + var message = new $root.google.api.MethodSettings(); + if (object.selector != null) + message.selector = String(object.selector); + if (object.longRunning != null) { + if (typeof object.longRunning !== "object") + throw TypeError(".google.api.MethodSettings.longRunning: object expected"); + message.longRunning = $root.google.api.MethodSettings.LongRunning.fromObject(object.longRunning); + } + return message; + }; + + /** + * Creates a plain object from a MethodSettings message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.MethodSettings + * @static + * @param {google.api.MethodSettings} message MethodSettings + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MethodSettings.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.selector = ""; + object.longRunning = null; + } + if (message.selector != null && message.hasOwnProperty("selector")) + object.selector = message.selector; + if (message.longRunning != null && message.hasOwnProperty("longRunning")) + object.longRunning = $root.google.api.MethodSettings.LongRunning.toObject(message.longRunning, options); + return object; + }; + + /** + * Converts this MethodSettings to JSON. + * @function toJSON + * @memberof google.api.MethodSettings + * @instance + * @returns {Object.} JSON object + */ + MethodSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MethodSettings + * @function getTypeUrl + * @memberof google.api.MethodSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MethodSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.MethodSettings"; + }; + + MethodSettings.LongRunning = (function() { + + /** + * Properties of a LongRunning. 
+ * @memberof google.api.MethodSettings + * @interface ILongRunning + * @property {google.protobuf.IDuration|null} [initialPollDelay] LongRunning initialPollDelay + * @property {number|null} [pollDelayMultiplier] LongRunning pollDelayMultiplier + * @property {google.protobuf.IDuration|null} [maxPollDelay] LongRunning maxPollDelay + * @property {google.protobuf.IDuration|null} [totalPollTimeout] LongRunning totalPollTimeout + */ + + /** + * Constructs a new LongRunning. + * @memberof google.api.MethodSettings + * @classdesc Represents a LongRunning. + * @implements ILongRunning + * @constructor + * @param {google.api.MethodSettings.ILongRunning=} [properties] Properties to set + */ + function LongRunning(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * LongRunning initialPollDelay. + * @member {google.protobuf.IDuration|null|undefined} initialPollDelay + * @memberof google.api.MethodSettings.LongRunning + * @instance + */ + LongRunning.prototype.initialPollDelay = null; + + /** + * LongRunning pollDelayMultiplier. + * @member {number} pollDelayMultiplier + * @memberof google.api.MethodSettings.LongRunning + * @instance + */ + LongRunning.prototype.pollDelayMultiplier = 0; + + /** + * LongRunning maxPollDelay. + * @member {google.protobuf.IDuration|null|undefined} maxPollDelay + * @memberof google.api.MethodSettings.LongRunning + * @instance + */ + LongRunning.prototype.maxPollDelay = null; + + /** + * LongRunning totalPollTimeout. + * @member {google.protobuf.IDuration|null|undefined} totalPollTimeout + * @memberof google.api.MethodSettings.LongRunning + * @instance + */ + LongRunning.prototype.totalPollTimeout = null; + + /** + * Creates a new LongRunning instance using the specified properties. 
+ * @function create + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {google.api.MethodSettings.ILongRunning=} [properties] Properties to set + * @returns {google.api.MethodSettings.LongRunning} LongRunning instance + */ + LongRunning.create = function create(properties) { + return new LongRunning(properties); + }; + + /** + * Encodes the specified LongRunning message. Does not implicitly {@link google.api.MethodSettings.LongRunning.verify|verify} messages. + * @function encode + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {google.api.MethodSettings.ILongRunning} message LongRunning message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LongRunning.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.initialPollDelay != null && Object.hasOwnProperty.call(message, "initialPollDelay")) + $root.google.protobuf.Duration.encode(message.initialPollDelay, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.pollDelayMultiplier != null && Object.hasOwnProperty.call(message, "pollDelayMultiplier")) + writer.uint32(/* id 2, wireType 5 =*/21).float(message.pollDelayMultiplier); + if (message.maxPollDelay != null && Object.hasOwnProperty.call(message, "maxPollDelay")) + $root.google.protobuf.Duration.encode(message.maxPollDelay, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.totalPollTimeout != null && Object.hasOwnProperty.call(message, "totalPollTimeout")) + $root.google.protobuf.Duration.encode(message.totalPollTimeout, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified LongRunning message, length delimited. Does not implicitly {@link google.api.MethodSettings.LongRunning.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {google.api.MethodSettings.ILongRunning} message LongRunning message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + LongRunning.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a LongRunning message from the specified reader or buffer. + * @function decode + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.MethodSettings.LongRunning} LongRunning + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LongRunning.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.MethodSettings.LongRunning(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.initialPollDelay = $root.google.protobuf.Duration.decode(reader, reader.uint32()); + break; + } + case 2: { + message.pollDelayMultiplier = reader.float(); + break; + } + case 3: { + message.maxPollDelay = $root.google.protobuf.Duration.decode(reader, reader.uint32()); + break; + } + case 4: { + message.totalPollTimeout = $root.google.protobuf.Duration.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a LongRunning message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.MethodSettings.LongRunning} LongRunning + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + LongRunning.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a LongRunning message. + * @function verify + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + LongRunning.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.initialPollDelay != null && message.hasOwnProperty("initialPollDelay")) { + var error = $root.google.protobuf.Duration.verify(message.initialPollDelay); + if (error) + return "initialPollDelay." + error; + } + if (message.pollDelayMultiplier != null && message.hasOwnProperty("pollDelayMultiplier")) + if (typeof message.pollDelayMultiplier !== "number") + return "pollDelayMultiplier: number expected"; + if (message.maxPollDelay != null && message.hasOwnProperty("maxPollDelay")) { + var error = $root.google.protobuf.Duration.verify(message.maxPollDelay); + if (error) + return "maxPollDelay." + error; + } + if (message.totalPollTimeout != null && message.hasOwnProperty("totalPollTimeout")) { + var error = $root.google.protobuf.Duration.verify(message.totalPollTimeout); + if (error) + return "totalPollTimeout." + error; + } + return null; + }; + + /** + * Creates a LongRunning message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {Object.} object Plain object + * @returns {google.api.MethodSettings.LongRunning} LongRunning + */ + LongRunning.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.MethodSettings.LongRunning) + return object; + var message = new $root.google.api.MethodSettings.LongRunning(); + if (object.initialPollDelay != null) { + if (typeof object.initialPollDelay !== "object") + throw TypeError(".google.api.MethodSettings.LongRunning.initialPollDelay: object expected"); + message.initialPollDelay = $root.google.protobuf.Duration.fromObject(object.initialPollDelay); + } + if (object.pollDelayMultiplier != null) + message.pollDelayMultiplier = Number(object.pollDelayMultiplier); + if (object.maxPollDelay != null) { + if (typeof object.maxPollDelay !== "object") + throw TypeError(".google.api.MethodSettings.LongRunning.maxPollDelay: object expected"); + message.maxPollDelay = $root.google.protobuf.Duration.fromObject(object.maxPollDelay); + } + if (object.totalPollTimeout != null) { + if (typeof object.totalPollTimeout !== "object") + throw TypeError(".google.api.MethodSettings.LongRunning.totalPollTimeout: object expected"); + message.totalPollTimeout = $root.google.protobuf.Duration.fromObject(object.totalPollTimeout); + } + return message; + }; + + /** + * Creates a plain object from a LongRunning message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {google.api.MethodSettings.LongRunning} message LongRunning + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + LongRunning.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.initialPollDelay = null; + object.pollDelayMultiplier = 0; + object.maxPollDelay = null; + object.totalPollTimeout = null; + } + if (message.initialPollDelay != null && message.hasOwnProperty("initialPollDelay")) + object.initialPollDelay = $root.google.protobuf.Duration.toObject(message.initialPollDelay, options); + if (message.pollDelayMultiplier != null && message.hasOwnProperty("pollDelayMultiplier")) + object.pollDelayMultiplier = options.json && !isFinite(message.pollDelayMultiplier) ? String(message.pollDelayMultiplier) : message.pollDelayMultiplier; + if (message.maxPollDelay != null && message.hasOwnProperty("maxPollDelay")) + object.maxPollDelay = $root.google.protobuf.Duration.toObject(message.maxPollDelay, options); + if (message.totalPollTimeout != null && message.hasOwnProperty("totalPollTimeout")) + object.totalPollTimeout = $root.google.protobuf.Duration.toObject(message.totalPollTimeout, options); + return object; + }; + + /** + * Converts this LongRunning to JSON. 
+ * @function toJSON + * @memberof google.api.MethodSettings.LongRunning + * @instance + * @returns {Object.} JSON object + */ + LongRunning.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for LongRunning + * @function getTypeUrl + * @memberof google.api.MethodSettings.LongRunning + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + LongRunning.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.MethodSettings.LongRunning"; + }; + + return LongRunning; + })(); + + return MethodSettings; + })(); + + /** + * ClientLibraryOrganization enum. + * @name google.api.ClientLibraryOrganization + * @enum {number} + * @property {number} CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED=0 CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED value + * @property {number} CLOUD=1 CLOUD value + * @property {number} ADS=2 ADS value + * @property {number} PHOTOS=3 PHOTOS value + * @property {number} STREET_VIEW=4 STREET_VIEW value + */ + api.ClientLibraryOrganization = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED"] = 0; + values[valuesById[1] = "CLOUD"] = 1; + values[valuesById[2] = "ADS"] = 2; + values[valuesById[3] = "PHOTOS"] = 3; + values[valuesById[4] = "STREET_VIEW"] = 4; + return values; + })(); + + /** + * ClientLibraryDestination enum. 
+ * @name google.api.ClientLibraryDestination + * @enum {number} + * @property {number} CLIENT_LIBRARY_DESTINATION_UNSPECIFIED=0 CLIENT_LIBRARY_DESTINATION_UNSPECIFIED value + * @property {number} GITHUB=10 GITHUB value + * @property {number} PACKAGE_MANAGER=20 PACKAGE_MANAGER value + */ + api.ClientLibraryDestination = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "CLIENT_LIBRARY_DESTINATION_UNSPECIFIED"] = 0; + values[valuesById[10] = "GITHUB"] = 10; + values[valuesById[20] = "PACKAGE_MANAGER"] = 20; + return values; + })(); + + /** + * LaunchStage enum. + * @name google.api.LaunchStage + * @enum {number} + * @property {number} LAUNCH_STAGE_UNSPECIFIED=0 LAUNCH_STAGE_UNSPECIFIED value + * @property {number} UNIMPLEMENTED=6 UNIMPLEMENTED value + * @property {number} PRELAUNCH=7 PRELAUNCH value + * @property {number} EARLY_ACCESS=1 EARLY_ACCESS value + * @property {number} ALPHA=2 ALPHA value + * @property {number} BETA=3 BETA value + * @property {number} GA=4 GA value + * @property {number} DEPRECATED=5 DEPRECATED value + */ + api.LaunchStage = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "LAUNCH_STAGE_UNSPECIFIED"] = 0; + values[valuesById[6] = "UNIMPLEMENTED"] = 6; + values[valuesById[7] = "PRELAUNCH"] = 7; + values[valuesById[1] = "EARLY_ACCESS"] = 1; + values[valuesById[2] = "ALPHA"] = 2; + values[valuesById[3] = "BETA"] = 3; + values[valuesById[4] = "GA"] = 4; + values[valuesById[5] = "DEPRECATED"] = 5; + return values; + })(); + /** * FieldBehavior enum. 
* @name google.api.FieldBehavior diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index f37029dbf6e..2abd4ae3de3 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -2156,6 +2156,13 @@ "type": "bool", "id": 7 }, + "deprecatedLegacyJsonFieldConflicts": { + "type": "bool", + "id": 11, + "options": { + "deprecated": true + } + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2239,6 +2246,21 @@ "default": false } }, + "debugRedact": { + "type": "bool", + "id": 16, + "options": { + "default": false + } + }, + "retention": { + "type": "OptionRetention", + "id": 17 + }, + "target": { + "type": "OptionTargetType", + "id": 18 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2271,6 +2293,27 @@ "JS_STRING": 1, "JS_NUMBER": 2 } + }, + "OptionRetention": { + "values": { + "RETENTION_UNKNOWN": 0, + "RETENTION_RUNTIME": 1, + "RETENTION_SOURCE": 2 + } + }, + "OptionTargetType": { + "values": { + "TARGET_TYPE_UNKNOWN": 0, + "TARGET_TYPE_FILE": 1, + "TARGET_TYPE_EXTENSION_RANGE": 2, + "TARGET_TYPE_MESSAGE": 3, + "TARGET_TYPE_FIELD": 4, + "TARGET_TYPE_ONEOF": 5, + "TARGET_TYPE_ENUM": 6, + "TARGET_TYPE_ENUM_ENTRY": 7, + "TARGET_TYPE_SERVICE": 8, + "TARGET_TYPE_METHOD": 9 + } } } }, @@ -2302,6 +2345,13 @@ "default": false } }, + "deprecatedLegacyJsonFieldConflicts": { + "type": "bool", + "id": 6, + "options": { + "deprecated": true + } + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2535,6 +2585,18 @@ } } }, + "Duration": { + "fields": { + "seconds": { + "type": "int64", + "id": 1 + }, + "nanos": { + "type": "int32", + "id": 2 + } + } + }, "Timestamp": { "fields": { "seconds": { @@ -2749,6 +2811,248 @@ "id": 1050, "extend": "google.protobuf.ServiceOptions" }, + "CommonLanguageSettings": { + "fields": { + "referenceDocsUri": { + "type": "string", + "id": 
1, + "options": { + "deprecated": true + } + }, + "destinations": { + "rule": "repeated", + "type": "ClientLibraryDestination", + "id": 2 + } + } + }, + "ClientLibrarySettings": { + "fields": { + "version": { + "type": "string", + "id": 1 + }, + "launchStage": { + "type": "LaunchStage", + "id": 2 + }, + "restNumericEnums": { + "type": "bool", + "id": 3 + }, + "javaSettings": { + "type": "JavaSettings", + "id": 21 + }, + "cppSettings": { + "type": "CppSettings", + "id": 22 + }, + "phpSettings": { + "type": "PhpSettings", + "id": 23 + }, + "pythonSettings": { + "type": "PythonSettings", + "id": 24 + }, + "nodeSettings": { + "type": "NodeSettings", + "id": 25 + }, + "dotnetSettings": { + "type": "DotnetSettings", + "id": 26 + }, + "rubySettings": { + "type": "RubySettings", + "id": 27 + }, + "goSettings": { + "type": "GoSettings", + "id": 28 + } + } + }, + "Publishing": { + "fields": { + "methodSettings": { + "rule": "repeated", + "type": "MethodSettings", + "id": 2 + }, + "newIssueUri": { + "type": "string", + "id": 101 + }, + "documentationUri": { + "type": "string", + "id": 102 + }, + "apiShortName": { + "type": "string", + "id": 103 + }, + "githubLabel": { + "type": "string", + "id": 104 + }, + "codeownerGithubTeams": { + "rule": "repeated", + "type": "string", + "id": 105 + }, + "docTagPrefix": { + "type": "string", + "id": 106 + }, + "organization": { + "type": "ClientLibraryOrganization", + "id": 107 + }, + "librarySettings": { + "rule": "repeated", + "type": "ClientLibrarySettings", + "id": 109 + } + } + }, + "JavaSettings": { + "fields": { + "libraryPackage": { + "type": "string", + "id": 1 + }, + "serviceClassNames": { + "keyType": "string", + "type": "string", + "id": 2 + }, + "common": { + "type": "CommonLanguageSettings", + "id": 3 + } + } + }, + "CppSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "PhpSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + 
"PythonSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "NodeSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "DotnetSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "RubySettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "GoSettings": { + "fields": { + "common": { + "type": "CommonLanguageSettings", + "id": 1 + } + } + }, + "MethodSettings": { + "fields": { + "selector": { + "type": "string", + "id": 1 + }, + "longRunning": { + "type": "LongRunning", + "id": 2 + } + }, + "nested": { + "LongRunning": { + "fields": { + "initialPollDelay": { + "type": "google.protobuf.Duration", + "id": 1 + }, + "pollDelayMultiplier": { + "type": "float", + "id": 2 + }, + "maxPollDelay": { + "type": "google.protobuf.Duration", + "id": 3 + }, + "totalPollTimeout": { + "type": "google.protobuf.Duration", + "id": 4 + } + } + } + } + }, + "ClientLibraryOrganization": { + "values": { + "CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED": 0, + "CLOUD": 1, + "ADS": 2, + "PHOTOS": 3, + "STREET_VIEW": 4 + } + }, + "ClientLibraryDestination": { + "values": { + "CLIENT_LIBRARY_DESTINATION_UNSPECIFIED": 0, + "GITHUB": 10, + "PACKAGE_MANAGER": 20 + } + }, + "LaunchStage": { + "values": { + "LAUNCH_STAGE_UNSPECIFIED": 0, + "UNIMPLEMENTED": 6, + "PRELAUNCH": 7, + "EARLY_ACCESS": 1, + "ALPHA": 2, + "BETA": 3, + "GA": 4, + "DEPRECATED": 5 + } + }, "fieldBehavior": { "rule": "repeated", "type": "google.api.FieldBehavior", From 42bfe101810e8522cb38966df1ef08c0644b6d04 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 10 Aug 2023 17:03:35 -0400 Subject: [PATCH 228/333] feat: storage write api veneer (#328) Co-authored-by: Lo Ferris Co-authored-by: Owl Bot --- handwritten/bigquery-storage/README.md | 1 + handwritten/bigquery-storage/package.json | 3 + .../bigquery-storage/src/adapt/proto.ts | 4 + 
handwritten/bigquery-storage/src/index.ts | 13 +- .../src/managedwriter/error.ts | 44 + .../src/managedwriter/index.ts | 37 + .../src/managedwriter/json_writer.ts | 125 +++ .../src/managedwriter/logger.ts | 47 + .../src/managedwriter/pending_write.ts | 83 ++ .../src/managedwriter/stream_connection.ts | 395 ++++++++ .../src/managedwriter/stream_types.ts | 75 ++ .../src/managedwriter/writer.ts | 110 ++ .../src/managedwriter/writer_client.ts | 304 ++++++ .../system-test/managed_writer_client_test.ts | 936 ++++++++++++++++++ handwritten/bigquery-storage/tsconfig.json | 3 +- 15 files changed, 2178 insertions(+), 2 deletions(-) create mode 100644 handwritten/bigquery-storage/src/managedwriter/error.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/index.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/json_writer.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/logger.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/pending_write.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/stream_connection.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/stream_types.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/writer.ts create mode 100644 handwritten/bigquery-storage/src/managedwriter/writer_client.ts create mode 100644 handwritten/bigquery-storage/system-test/managed_writer_client_test.ts diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index d7dccea8252..c2502aa1d6c 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -185,6 +185,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | +| Append_rows_buffered | [source 
code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_buffered.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_buffered.js,samples/README.md) | | Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | | Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | | Append_rows_table_to_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_table_to_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_table_to_proto2.js,samples/README.md) | diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7edc6226051..04fedc55985 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -30,6 +30,8 @@ "google-gax": "^4.0.3" }, "devDependencies": { + "@google-cloud/bigquery": "^6.1.0", + "@types/uuid": "^9.0.1", "@types/mocha": "^9.0.0", "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", @@ -45,6 +47,7 @@ "pack-n-play": "^1.0.0-2", "sinon": "^15.0.0", "ts-loader": "^9.0.0", + "uuid": "^9.0.0", 
"typescript": "^5.1.6", "webpack": "^5.0.0", "webpack-cli": "^5.0.0" diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 0602d7d1293..3833a7ffabf 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -186,6 +186,10 @@ function normalizeDescriptorSet(fds: FileDescriptorSet): DescriptorProto { export function normalizeDescriptor(dp: DescriptorProto): DescriptorProto { dp.name = normalizeName(dp.name); for (const f of dp.field) { + if (!f.label) { + f.label = + protos.google.protobuf.FieldDescriptorProto.Label.LABEL_OPTIONAL; + } if (f.proto3Optional) { f.proto3Optional = null; } diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index cbf98da7c50..85008b70dcf 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -18,22 +18,33 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; +import * as managedwriter from './managedwriter'; const BigQueryReadClient = v1.BigQueryReadClient; type BigQueryReadClient = v1.BigQueryReadClient; const BigQueryWriteClient = v1.BigQueryWriteClient; type BigQueryWriteClient = v1.BigQueryWriteClient; const BigQueryStorageClient = v1beta1.BigQueryStorageClient; type BigQueryStorageClient = v1beta1.BigQueryStorageClient; +const WriterClient = managedwriter.WriterClient; +type WriterClient = managedwriter.WriterClient; export { v1, BigQueryReadClient, v1beta1, BigQueryStorageClient, BigQueryWriteClient, + managedwriter, + WriterClient, }; // For compatibility with JavaScript libraries we need to provide this default export: // tslint:disable-next-line no-default-export -export default {v1, BigQueryReadClient, BigQueryWriteClient}; +export default { + v1, + BigQueryReadClient, + BigQueryWriteClient, + managedwriter, + WriterClient, +}; import * as protos from '../protos/protos'; export {protos}; import * 
as adapt from './adapt'; diff --git a/handwritten/bigquery-storage/src/managedwriter/error.ts b/handwritten/bigquery-storage/src/managedwriter/error.ts new file mode 100644 index 00000000000..bf52eeb1d20 --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/error.ts @@ -0,0 +1,44 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as gax from 'google-gax'; +import * as protos from '../../protos/protos'; + +type StorageError = protos.google.cloud.bigquery.storage.v1.IStorageError; + +const StorageError = protos.google.cloud.bigquery.storage.v1.StorageError; + +/** + * The BigQuery Storage API service augments applicable errors with service-specific details in + * the form of a StorageError message. 
+ * + * @param {gax.GoogleError} err + * @returns {google.cloud.bigquery.storage.v1.StorageError} + */ +export function parseStorageErrors(err: gax.GoogleError): StorageError[] { + const storageErrors: StorageError[] = []; + if ( + err.metadata && + err.metadata.get('google.cloud.bigquery.storage.v1.storageerror-bin') + ) { + const serrors = err.metadata.get( + 'google.cloud.bigquery.storage.v1.storageerror-bin' + ) as Buffer[]; + for (const serr of serrors) { + const storageError = StorageError.decode(serr); + storageErrors.push(storageError); + } + } + return storageErrors; +} diff --git a/handwritten/bigquery-storage/src/managedwriter/index.ts b/handwritten/bigquery-storage/src/managedwriter/index.ts new file mode 100644 index 00000000000..ff6d052311f --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/index.ts @@ -0,0 +1,37 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * Package managedwriter provides an EXPERIMENTAL thick client around the + * BigQuery storage API's BigQueryWriteClient. + * More information about this new write client may also be found in + * the public documentation: https://cloud.google.com/bigquery/docs/write-api + * + * It is EXPERIMENTAL and subject to change or removal without notice. This is primarily to signal that this + * package may still make breaking changes to existing methods and functionality. 
+ * + * @namespace managedwriter + */ + +export {WriterClient} from './writer_client'; +export {Writer} from './writer'; +export {JSONWriter} from './json_writer'; +export { + DefaultStream, + BufferedStream, + CommittedStream, + PendingStream, +} from './stream_types'; +export {parseStorageErrors} from './error'; +export {setLogFunction} from './logger'; diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts new file mode 100644 index 00000000000..8389860cd98 --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -0,0 +1,125 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {protobuf} from 'google-gax'; +import * as protos from '../../protos/protos'; +import {PendingWrite} from './pending_write'; +import {StreamConnection, RemoveListener} from './stream_connection'; +import * as adapt from '../adapt'; +import {Writer} from './writer'; + +type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; +type IInt64Value = protos.google.protobuf.IInt64Value; +type IDescriptorProto = protos.google.protobuf.IDescriptorProto; +type DescriptorProto = protos.google.protobuf.DescriptorProto; +type JSONPrimitive = string | number | boolean | null; +type JSONValue = JSONPrimitive | JSONObject | JSONArray; +type JSONObject = {[member: string]: JSONValue}; +type JSONArray = Array; +type JSONList = Array; + +const DescriptorProto = protos.google.protobuf.DescriptorProto; +const {Type} = protobuf; + +/** + * A StreamWriter that can write JSON data to BigQuery tables. The JSONWriter is + * built on top of a Writer, and it simply converts all JSON data to protobuf messages then + * calls Writer's appendRows() method to write to BigQuery tables. It maintains all Writer + * functions, but also provides an additional feature: schema update support, where if the BigQuery + * table schema is updated, users will be able to ingest data on the new schema after some time (in + * order of minutes). + * + * @class + * @extends managedwriter.Writer + * @memberof managedwriter + * @see managedwriter.Writer + */ +export class JSONWriter { + private _writer: Writer; + private _type: protobuf.Type = Type.fromJSON('root', { + fields: {}, + }); + private _schemaListener: RemoveListener; + + /** + * Creates a new JSONWriter instance. + * + * @param {Object} params - The parameters for the JSONWriter. + * @param {StreamConnection} params.connection - The stream connection + * to the BigQuery streaming insert operation. + * @param {IDescriptorProto} params.protoDescriptor - The proto descriptor + * for the JSON rows. 
+ */ + constructor(params: { + connection: StreamConnection; + protoDescriptor: IDescriptorProto; + }) { + const {connection, protoDescriptor} = params; + this._writer = new Writer(params); + this._schemaListener = connection.onSchemaUpdated(this.onSchemaUpdated); + this.setProtoDescriptor(protoDescriptor); + } + + private onSchemaUpdated = (schema: TableSchema) => { + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + schema, + 'root' + ); + this.setProtoDescriptor(protoDescriptor); + }; + + /** + * Update the proto descriptor for the Writer. + * Internally a reconnection event is gonna happen to apply + * the new proto descriptor. + * + * @param {IDescriptorProto} protoDescriptor - The proto descriptor. + */ + setProtoDescriptor(protoDescriptor: IDescriptorProto): void { + const normalized = adapt.normalizeDescriptor( + new DescriptorProto(protoDescriptor) + ); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this._type = (Type as any).fromDescriptor(normalized); + this._writer.setProtoDescriptor(protoDescriptor); + } + + /** + * Writes a JSONList that contains objects to be written to the BigQuery table by first converting + * the JSON data to protobuf messages, then using Writer's appendRows() to write the data at current end + * of stream. If there is a schema update, the current Writer is closed and reopened with the updated schema. + * + * @param {JSONList} rows - The list of JSON rows. + * @param {number|Long|string|null} offsetValue? - The offset value. + * @returns {managedwriter.PendingWrite} The pending write. 
+ */ + appendRows(rows: JSONList, offsetValue?: IInt64Value['value']): PendingWrite { + const serializedRows = rows.map(r => { + const msg = this._type.create(r); + return this._type.encode(msg).finish(); + }); + const pw = this._writer.appendRows( + { + serializedRows, + }, + offsetValue + ); + return pw; + } + + close() { + this._writer.close(); + this._schemaListener.off(); + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/logger.ts b/handwritten/bigquery-storage/src/managedwriter/logger.ts new file mode 100644 index 00000000000..d7c02f7aeed --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/logger.ts @@ -0,0 +1,47 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as util from 'util'; + +/*! The external function used to emit logs. */ +let logFunction: ((msg: string) => void) | null = null; + +/** + * Log function to use for debug output. By default, we don't perform any + * logging. + * + * @private + * @internal + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function logger(source: string, msg: string, ...otherArgs: any[]) { + if (logFunction) { + const time = new Date().toISOString(); + const formattedMsg = util.format( + `D ${time} | ${source} | ${msg} |`, + ...otherArgs + ); + logFunction(formattedMsg); + } +} + +/** + * Sets or disables the log function for all active Firestore instances. 
+ * + * @param logger A log function that takes a message (such as `console.log`) or + * `null` to turn off logging. + */ +export function setLogFunction(logger: ((msg: string) => void) | null): void { + logFunction = logger; +} diff --git a/handwritten/bigquery-storage/src/managedwriter/pending_write.ts b/handwritten/bigquery-storage/src/managedwriter/pending_write.ts new file mode 100644 index 00000000000..2e5a748316c --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/pending_write.ts @@ -0,0 +1,83 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as protos from '../../protos/protos'; + +type AppendRowsResponse = + protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse; +type AppendRowRequest = + protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; + +/** + * PendingWrite tracks state for a set of rows that are part of a single + * append request. PendingWrites have a Promise interface to await for + * append results, as well as any errors encountered while processing + * the request. 
+ */ +export class PendingWrite { + private request: AppendRowRequest; + private response?: AppendRowsResponse; + private promise: Promise; + private resolveFunc?: (response: AppendRowsResponse) => void; + private rejectFunc?: (reason?: protos.google.rpc.IStatus) => void; + + constructor(request: AppendRowRequest) { + this.request = request; + this.promise = new Promise((resolve, reject) => { + this.resolveFunc = resolve; + this.rejectFunc = reject; + }); + } + + _markDone(err: Error | null, response?: AppendRowsResponse) { + if (err) { + this.rejectFunc && this.rejectFunc(err); + return; + } + + if (response) { + this.response = response; + this.resolveFunc && this.resolveFunc(response); + return; + } + + this.rejectFunc && this.rejectFunc(new Error('ended with no status')); + } + + /** + * Abort pending write so calls to GetResult can be unblocked/cancelled. + */ + abort() { + this.rejectFunc && this.rejectFunc(new Error('aborted')); + } + + /** + * Access the AppendRowRequest that generated this pending write request. + */ + getRequest(): AppendRowRequest { + return this.request; + } + + /** + * Promise interface to await for + * append results, as well as any errors encountered while processing + * the request. + */ + getResult(): Promise { + if (this.response) { + return Promise.resolve(this.response); + } + return this.promise; + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts new file mode 100644 index 00000000000..995fa7d60ff --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -0,0 +1,395 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as gax from 'google-gax'; +import {EventEmitter} from 'events'; +import * as protos from '../../protos/protos'; + +import {WriterClient} from './writer_client'; +import {PendingWrite} from './pending_write'; +import {logger} from './logger'; +import {parseStorageErrors} from './error'; + +type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; +type IInt64Value = protos.google.protobuf.IInt64Value; +type AppendRowsResponse = + protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse; +type AppendRowRequest = + protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; +type FinalizeWriteStreamResponse = + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse; +type FinalizeWriteStreamRequest = + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest; +type FlushRowsResponse = + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse; +type FlushRowsRequest = + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest; + +export type RemoveListener = { + off: () => void; +}; + +/** + * StreamConnection is responsible for writing requests to a bidirecional + * GRPC connection against the Storage Write API appendRows method. + * + * All the requests are sent without flow control, and writes are sent + * in receiving order. It's user's responsibility to do the flow control + * and maintain the lifetime of the requests. 
+ * + * @class + * @extends EventEmitter + * @memberof managedwriter + */ +export class StreamConnection extends EventEmitter { + private _streamId: string; + private _writeClient: WriterClient; + private _connection?: gax.CancellableStream | null; + private _callOptions?: gax.CallOptions; + private _pendingWrites: PendingWrite[]; + + constructor( + streamId: string, + writeClient: WriterClient, + options?: gax.CallOptions + ) { + super(); + this._streamId = streamId; + this._writeClient = writeClient; + this._callOptions = options; + this._pendingWrites = []; + this.open(); + } + + open() { + if (this.isOpen()) { + this.close(); + } + const callOptions = this.resolveCallOptions( + this._streamId, + this._callOptions + ); + const client = this._writeClient.getClient(); + const connection = client.appendRows(callOptions); + this._connection = connection; + this._connection.on('data', this.handleData); + this._connection.on('error', this.handleError); + this._connection.on('close', () => { + this.trace('connection closed'); + }); + this._connection.on('pause', () => { + this.trace('connection paused'); + }); + this._connection.on('resume', () => { + this.trace('connection resumed'); + }); + this._connection.on('end', () => { + this.trace('connection ended'); + }); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private trace(msg: string, ...otherArgs: any[]) { + logger('stream_connection', msg, ...otherArgs); + } + + private handleError = (err: gax.GoogleError) => { + this.trace('on error', err, JSON.stringify(err)); + if (this.shouldReconnect(err)) { + this.reconnect(); + return; + } + let nextPendingWrite = this.getNextPendingWrite(); + if (this.isPermanentError(err)) { + this.trace('found permanent error', err); + while (nextPendingWrite) { + this.ackNextPendingWrite(err); + nextPendingWrite = this.getNextPendingWrite(); + } + this.emit('error', err); + return; + } + if (this.isRequestError(err) && nextPendingWrite) { + this.trace( + 
'found request error with pending write', + err, + nextPendingWrite + ); + this.ackNextPendingWrite(err); + return; + } + this.emit('error', err); + }; + + private shouldReconnect(err: gax.GoogleError): boolean { + if ( + err.code && + [gax.Status.UNAVAILABLE, gax.Status.RESOURCE_EXHAUSTED].includes( + err.code + ) && + err.message + ) { + const detail = err.message.toLowerCase(); + const knownErrors = [ + 'service is currently unavailable', // schema mismatch + 'read econnreset', // idle connection reset + 'bandwidth exhausted', + 'memory limit exceeded', + ]; + const isKnownError = + knownErrors.findIndex(err => detail.includes(err)) !== -1; + return isKnownError; + } + return false; + } + + private isPermanentError(err: gax.GoogleError): boolean { + if (err.code === gax.Status.INVALID_ARGUMENT) { + const storageErrors = parseStorageErrors(err); + for (const storageError of storageErrors) { + if ( + storageError.errorMessage?.includes( + 'Schema mismatch due to extra fields in user schema' + ) + ) { + return true; + } + } + } + return false; + } + + private isRequestError(err: gax.GoogleError): boolean { + return err.code === gax.Status.INVALID_ARGUMENT; + } + + private resolveCallOptions( + streamId: string, + options?: gax.CallOptions + ): gax.CallOptions { + const callOptions = options || {}; + if (!callOptions.otherArgs) { + callOptions.otherArgs = {}; + } + if (!callOptions.otherArgs.headers) { + callOptions.otherArgs.headers = {}; + } + // This header is required so that the BigQuery Storage API + // knows which region to route the request to. 
+ callOptions.otherArgs.headers[ + 'x-goog-request-params' + ] = `write_stream=${streamId}`; + return callOptions; + } + + private handleData = (response: AppendRowsResponse) => { + this.trace('data arrived', response); + const pw = this.getNextPendingWrite(); + if (!pw) { + this.trace('data arrived with no pending write available', response); + return; + } + if (response.updatedSchema) { + this.emit('schemaUpdated', response.updatedSchema); + } + this.ackNextPendingWrite(null, response); + }; + + /** + * Callback is invoked when a the server notifies the stream connection + * of a new Table Schema change. + */ + onSchemaUpdated(listener: (schema: TableSchema) => void): RemoveListener { + return this.registerListener('schemaUpdated', listener); + } + + /** + * Callback is invoked when an error is received from the server. + */ + onConnectionError(listener: (err: gax.GoogleError) => void): RemoveListener { + return this.registerListener('error', listener); + } + + private registerListener( + eventName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + listener: (...args: any[]) => void + ): RemoveListener { + this.addListener(eventName, listener); + return { + off: () => { + this.removeListener(eventName, listener); + }, + }; + } + + // check if is default stream + private isDefaultStream(): boolean { + return this._streamId.endsWith('_default'); + } + + /** + * Get the name of the write stream associated with this connection. + * When the connection is created withouth a write stream, + * this method can be used to retrieve the automatically + * created write stream name. 
+ */ + getStreamId = (): string => { + return this._streamId; + }; + + private getNextPendingWrite(): PendingWrite | null { + if (this._pendingWrites.length > 0) { + return this._pendingWrites[0]; + } + return null; + } + + private ackNextPendingWrite( + err: Error | null, + result?: + | protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse + | undefined + ) { + const pw = this._pendingWrites.pop(); + if (pw) { + pw._markDone(err, result); + } + } + + /** + * Access in-flight write requests. + */ + getPendingWrites(): PendingWrite[] { + return [...this._pendingWrites]; + } + + /** + * Write a request to the bi-directional stream connection. + * + * @param {AppendRowRequest} request - request to send. + * + * @returns {managedwriter.PendingWrite} + */ + write(request: AppendRowRequest): PendingWrite { + this.trace('write', request); + const pw = new PendingWrite(request); + this.send(pw); + return pw; + } + + private send(pw: PendingWrite) { + const request = pw.getRequest(); + if (!this._connection) { + pw._markDone(new Error('connection closed')); + return; + } + if (this._connection.destroyed || this._connection.closed) { + this.reconnect(); + } + this.trace('sending pending write', pw); + try { + this._connection.write(request, err => { + this.trace('wrote pending write', err, this._pendingWrites.length); + if (err) { + pw._markDone(err); //TODO: add retries + return; + } + this._pendingWrites.unshift(pw); + }); + } catch (err) { + pw._markDone(err as Error); + } + } + + /** + * Check if connection is open and ready to send requests. + */ + isOpen(): boolean { + return !!this._connection; + } + + /** + * Reconnect and re send inflight requests. + */ + reconnect() { + this.trace('reconnect called'); + this.close(); + this.open(); + } + + /** + * Close the bi-directional stream connection. 
+ */ + close() { + if (!this._connection) { + return; + } + this._connection.end(); + this._connection.removeAllListeners(); + this._connection = null; + } + + /** + * Flushes rows to a BUFFERED stream. + * If users are appending rows to BUFFERED stream, + * flush operation is required in order for the rows to become available for reading. + * A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, + * to the offset specified in the request. + * + * Flush is not supported on the DefaultStream stream, since it is not BUFFERED. + * + * @param {number|Long|string|null} request.offset + * + * @returns {Promise} + */ + async flushRows(request?: { + offset?: IInt64Value['value']; + }): Promise { + this.close(); + if (this.isDefaultStream()) { + return null; + } + let offsetValue: FlushRowsRequest['offset']; + if (request && request.offset) { + offsetValue = { + value: request.offset, + }; + } + const flushRowsReq: FlushRowsRequest = { + writeStream: this._streamId, + offset: offsetValue, + }; + + return this._writeClient.flushRows(flushRowsReq); + } + + /** + * Finalize a write stream so that no new data can be appended to the + * stream. Finalize is not supported on the DefaultStream stream. + * + * @returns {Promise} + */ + async finalize(): Promise { + this.close(); + if (this.isDefaultStream()) { + return null; + } + const finalizeStreamReq: FinalizeWriteStreamRequest = { + name: this._streamId, + }; + + return this._writeClient.finalizeWriteStream(finalizeStreamReq); + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_types.ts b/handwritten/bigquery-storage/src/managedwriter/stream_types.ts new file mode 100644 index 00000000000..45728c7df05 --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/stream_types.ts @@ -0,0 +1,75 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as protos from '../../protos/protos'; + +export type WriteStream = protos.google.cloud.bigquery.storage.v1.IWriteStream; +export type WriteStreamType = WriteStream['type'] | 'DEFAULT'; + +export const WriteStreamType = + protos.google.cloud.bigquery.storage.v1.WriteStream.Type; + +/** + * DefaultStream most closely mimics the legacy bigquery + * tabledata.insertAll semantics. Successful inserts are + * committed immediately, and there's no tracking offsets as + * all writes go into a "default" stream that always exists + * for a table. + * + * @memberof managedwriter + */ +export const DefaultStream = 'DEFAULT'; + +/** + * CommittedStream appends data immediately, but creates a + * discrete stream for the work so that offset tracking can + * be used to track writes. + * + * @memberof managedwriter + */ +export const CommittedStream = 'COMMITTED'; + +/** + * BufferedStream is a form of checkpointed stream, that allows + * you to advance the offset of visible rows via Flush operations. + * + * @memberof managedwriter + */ +export const BufferedStream = 'BUFFERED'; + +/** + * PendingStream is a stream in which no data is made visible to + * readers until the stream is finalized and committed explicitly. 
+ * + * @memberof managedwriter + */ +export const PendingStream = 'PENDING'; + +export function streamTypeToEnum( + streamType: WriteStreamType +): WriteStream['type'] { + switch (streamType) { + case WriteStreamType.BUFFERED: + case BufferedStream: + return WriteStreamType.BUFFERED; + case WriteStreamType.COMMITTED: + case CommittedStream: + return WriteStreamType.COMMITTED; + case WriteStreamType.PENDING: + case PendingStream: + return WriteStreamType.PENDING; + default: + return WriteStreamType.TYPE_UNSPECIFIED; + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/writer.ts b/handwritten/bigquery-storage/src/managedwriter/writer.ts new file mode 100644 index 00000000000..1a090da6247 --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/writer.ts @@ -0,0 +1,110 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {isDeepStrictEqual} from 'util'; +import * as protos from '../../protos/protos'; +import {PendingWrite} from './pending_write'; +import {StreamConnection} from './stream_connection'; + +type AppendRowRequest = + protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; +type IInt64Value = protos.google.protobuf.IInt64Value; +type ProtoData = + protos.google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData; +type IDescriptorProto = protos.google.protobuf.IDescriptorProto; +type DescriptorProto = protos.google.protobuf.DescriptorProto; + +const DescriptorProto = protos.google.protobuf.DescriptorProto; + +/** + * A BigQuery Storage API Writer that can be used to write data into BigQuery Table + * using the Storage API. + * + * @class + * @memberof managedwriter + */ +export class Writer { + private _protoDescriptor: DescriptorProto; + private _streamConnection: StreamConnection; + + /** + * Creates a new Writer instance. + * + * @param {Object} params - The parameters for the JSONWriter. + * @param {StreamConnection} params.connection - The stream connection + * to the BigQuery streaming insert operation. + * @param {IDescriptorProto} params.protoDescriptor - The proto descriptor + * for the JSON rows. + */ + constructor(params: { + connection: StreamConnection; + protoDescriptor: IDescriptorProto; + }) { + const {connection, protoDescriptor} = params; + this._streamConnection = connection; + this._protoDescriptor = new DescriptorProto(protoDescriptor); + } + + /** + * Update the proto descriptor for the Writer. + * Internally a reconnection event is gonna happen to apply + * the new proto descriptor. + * + * @param {IDescriptorProto} protoDescriptor - The proto descriptor. 
+ */ + setProtoDescriptor(protoDescriptor: IDescriptorProto) { + const protoDescriptorInstance = new DescriptorProto(protoDescriptor); + if (!isDeepStrictEqual(protoDescriptorInstance, this._protoDescriptor)) { + this._protoDescriptor = new DescriptorProto(protoDescriptor); + // Reopen connection + this._streamConnection.reconnect(); + } + } + + /** + * Schedules the writing of rows at given offset. + * + * @param {google.cloud.bigquery.storage.v1.IProtoRows|null} rows - the rows in serialized format to write to BigQuery. + * @param {number|Long|string|null} offsetValue - the offset of the first row. + * @returns {managedwriter.PendingWrite} The pending write + **/ + appendRows( + rows: ProtoData['rows'], + offsetValue?: IInt64Value['value'] + ): PendingWrite { + let offset: AppendRowRequest['offset']; + if (offsetValue) { + offset = { + value: offsetValue, + }; + } + const request: AppendRowRequest = { + writeStream: this._streamConnection.getStreamId(), + protoRows: { + rows, + writerSchema: { + protoDescriptor: this._protoDescriptor.toJSON(), + }, + }, + offset, + }; + + const pw = this._streamConnection.write(request); + return pw; + } + + close() { + this._streamConnection.close(); + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts new file mode 100644 index 00000000000..8d9d1747333 --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts @@ -0,0 +1,304 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import * as gax from 'google-gax'; +import type {CallOptions, ClientOptions} from 'google-gax'; +import * as protos from '../../protos/protos'; + +import {BigQueryWriteClient} from '../v1'; +import {WriteStreamType, DefaultStream, streamTypeToEnum} from './stream_types'; +import {StreamConnection} from './stream_connection'; + +type StreamConnections = { + connectionList: StreamConnection[]; + connections: Record; +}; +type CreateWriteStreamRequest = + protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest; +type BatchCommitWriteStreamsRequest = + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest; +type BatchCommitWriteStreamsResponse = + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse; +type FlushRowsRequest = + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest; +type FlushRowsResponse = + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse; +type FinalizeWriteStreamRequest = + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest; +type FinalizeWriteStreamResponse = + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse; + +/** + * BigQuery Write API Client. + * The Write API can be used to write data to BigQuery. + * + * This class provides the ability to make remote calls to the backing service through method + * calls that map to API methods. 
+ * + * For supplementary information about the Write API, see: + * https://cloud.google.com/bigquery/docs/write-api + * + * @class + * @memberof managedwriter + */ +export class WriterClient { + private _client: BigQueryWriteClient; + private _connections: StreamConnections; + private _open: boolean; + + constructor(opts?: ClientOptions) { + const baseOptions = { + 'grpc.keepalive_time_ms': 30 * 1000, + 'grpc.keepalive_timeout_ms': 10 * 1000, + }; + this._client = new BigQueryWriteClient({ + ...baseOptions, + ...opts, + }); + this._connections = { + connectionList: [], + connections: {}, + }; + this._open = false; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves when auth is complete. + */ + initialize = async (): Promise => { + await this._client.initialize(); + this._open = true; + }; + + getClient = (): BigQueryWriteClient => { + return this._client; + }; + + setClient = (client: BigQueryWriteClient): void => { + this._client = client; + }; + + /** + * Check if client is open and ready to send requests. + */ + isOpen(): boolean { + return this._open; + } + + /** + * Creates a write stream to the given table. + * Additionally, every table has a special stream named DefaultStream + * to which data can be written. This stream doesn't need to be created using + * createWriteStream. It is a stream that can be used simultaneously by any + * number of clients. Data written to this stream is considered committed as + * soon as an acknowledgement is received. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.streamType + * Required. The type of stream to create. + * @param {string} request.destinationTable + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @returns {Promise}} - The promise which resolves to the streamId. + */ + async createWriteStream(request: { + streamType: WriteStreamType; + destinationTable: string; + }): Promise { + await this.initialize(); + const {streamType, destinationTable} = request; + const createReq: CreateWriteStreamRequest = { + parent: destinationTable, + writeStream: { + type: streamTypeToEnum(streamType), + }, + }; + const [response] = await this._client.createWriteStream(createReq); + if (typeof [response] === undefined) { + throw new gax.GoogleError(`${response}`); + } + try { + if (response.name) { + const streamId = response.name; + return streamId; + } + return ''; + } catch { + throw new Error('Stream connection failed'); + } + } + + /** + * Open StreamConnection in which data can be appended to the given stream. + * + * If a stream is created beforehand with `createWriteStream`, the streamId can be passed here. + * + * Or destinationTable + streamType can be passed so the WriteStreamStream is created under the hood. + * + * @param {object} [request] + * @param {string?} request.streamId + * Optional. The ID of the stream to open. + * @param {string?} request.destinationTable + * Optional. Parent table that all the streams should belong to, in the form + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {string?} request.streamType + * Optional. The type of stream to create. If not specified, the default is `DEFAULT`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {StreamConnection} - stream which rows can be appended to. 
+ */ + async createStreamConnection( + request: { + streamId?: string; + destinationTable?: string; + streamType?: WriteStreamType; + }, + options?: CallOptions + ): Promise { + await this.initialize(); + const {streamId, streamType, destinationTable} = request; + try { + const fullStreamId = await this.resolveStreamId( + streamId, + streamType, + destinationTable + ); + const streamConnection = new StreamConnection( + fullStreamId, + this, + options + ); + this._connections.connectionList.push(streamConnection); + this._connections.connections[`${streamId}`] = streamConnection; + return streamConnection; + } catch (err) { + throw new Error('managed stream connection failed:' + err); + } + } + + private async resolveStreamId( + streamId?: string, + streamType?: WriteStreamType, + destinationTable?: string + ): Promise { + if (streamId && streamId !== '') { + if (streamId === DefaultStream) { + if (destinationTable !== '') { + return `${destinationTable}/streams/_default`; + } else { + throw new Error('destinationTable needed if DefaultStream informed'); + } + } + return streamId; + } + if (destinationTable) { + if (streamType) { + streamId = await this.createWriteStream({ + streamType, + destinationTable, + }); + return streamId; + } + return `${destinationTable}/streams/_default`; + } + throw new Error( + 'streamId or destinationTable required to create write stream' + ); + } + + close() { + this._connections.connectionList.map(conn => { + conn.close(); + }); + this._open = false; + } + + /** + * Atomically commits a group of `PENDING` streams that belong to the same + * `parent` table. + * + * Streams must be finalized before commit and cannot be committed multiple + * times. Once a stream is committed, data in the stream becomes available + * for read operations. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
Parent table that all the streams should belong to, in the form + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @param {string[]} request.writeStreams + * Required. The group of streams that will be committed atomically. + * @returns {Promise} - a promise which resolves to an {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse | BatchCommitWriteStreamsResponse}. + */ + async batchCommitWriteStream( + request: BatchCommitWriteStreamsRequest + ): Promise { + await this.initialize(); + const [res] = await this._client.batchCommitWriteStreams(request); + return res; + } + + /** + * Flushes rows to a BUFFERED stream. + * + * If users are appending rows to BUFFERED stream, flush operation is + * required in order for the rows to become available for reading. A + * Flush operation flushes up to any previously flushed offset in a BUFFERED + * stream, to the offset specified in the request. + * + * Flush is not supported on the DEFAULT stream, since it is not BUFFERED. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.writeStream + * Required. The stream that is the target of the flush operation. + * @param {google.protobuf.Int64Value} request.offset + * Ending offset of the flush operation. Rows before this offset(including + * this offset) will be flushed. + * @returns {Promise} - The promise which resolves to a {@link google.cloud.bigquery.storage.v1.FlushRowsResponse | FlushRowsResponse}. + */ + async flushRows(request?: FlushRowsRequest): Promise { + await this.initialize(); + const [res] = await this._client.flushRows(request); + return res; + } + + /** + * Finalize a write stream so that no new data can be appended to the + * stream. Finalize is not supported on the DefaultStream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
Name of the stream to finalize, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. + * @returns {Promise} - A promise which resolves to a {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse | FinalizeWriteStreamResponse}. + */ + async finalizeWriteStream( + request: FinalizeWriteStreamRequest + ): Promise { + await this.initialize(); + const [res] = await this._client.finalizeWriteStream(request); + return res; + } +} diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts new file mode 100644 index 00000000000..8ea0a09123a --- /dev/null +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -0,0 +1,936 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import {describe, it, xit} from 'mocha'; +import * as uuid from 'uuid'; +import * as gax from 'google-gax'; +import {BigQuery, TableSchema} from '@google-cloud/bigquery'; +import * as protos from '../protos/protos'; +import * as bigquerywriter from '../src'; +import {ClientOptions, protobuf} from 'google-gax'; +import * as customerRecordProtoJson from '../samples/customer_record.json'; + +const {managedwriter, adapt} = bigquerywriter; +const {WriterClient, Writer, JSONWriter, parseStorageErrors} = managedwriter; +const {Type} = protobuf; + +if (process.env.NODE_ENV === 'DEBUG') { + managedwriter.setLogFunction(console.log); +} + +type WriteStream = protos.google.cloud.bigquery.storage.v1.IWriteStream; +type DescriptorProto = protos.google.protobuf.IDescriptorProto; +type IInt64Value = protos.google.protobuf.IInt64Value; +type AppendRowsResponse = + protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse; + +const FieldDescriptorProtoType = + protos.google.protobuf.FieldDescriptorProto.Type; + +const GCLOUD_TESTS_PREFIX = 'nodejs_bqstorage_system_test'; +const bigquery = new BigQuery(); +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); +const datasetId = generateUuid(); + +const root = protobuf.Root.fromJSON(customerRecordProtoJson); +if (!root) { + throw Error('Proto must not be undefined'); +} +const CustomerRecord = root.lookupType('CustomerRecord'); + +describe('managedwriter.WriterClient', () => { + let projectId: string; + let parent: string; + let tableId: string; + let bqWriteClient: bigquerywriter.BigQueryWriteClient; + let clientOptions: ClientOptions; + const schema: TableSchema = { + fields: [ + { + name: 'customer_name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'row_num', + type: 'INTEGER', + mode: 'REQUIRED', + }, + ], + }; + const protoDescriptor: DescriptorProto = { + name: 'CustomerRecord', + field: [ + { + name: 'customer_name', + number: 1, + type: 
FieldDescriptorProtoType.TYPE_STRING, + }, + { + name: 'row_num', + number: 2, + type: FieldDescriptorProtoType.TYPE_INT64, + }, + ], + }; + + before(async () => { + await deleteDatasets(); + + await bigquery.createDataset(datasetId); + }); + + beforeEach(async () => { + tableId = generateUuid(); + + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId, {schema}); + + projectId = table.metadata.tableReference.projectId; + + parent = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; + }); + + after(async () => { + await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn); + }); + + beforeEach(async () => { + clientOptions = { + projectId: projectId, + 'grpc.keepalive_time_ms': 30 * 1000, + 'grpc.keepalive_timeout_ms': 10 * 1000, + }; + bqWriteClient = new bigquerywriter.BigQueryWriteClient(clientOptions); + }); + + afterEach(async () => { + await bqWriteClient.close(); + }); + + describe('Common methods', () => { + it('should create a client without arguments', () => { + const client = new WriterClient(); + assert(client.getClient()); + }); + + it('should create a client with arguments: parent, client, opts, writeStream', async () => { + const client = new WriterClient(clientOptions); + assert(client.getClient()); + const clientId = await client.getClient().getProjectId(); + assert.strictEqual(clientId, clientOptions.projectId); + }); + }); + + describe('Writer', () => { + it('should invoke appendRows without errors', async () => { + bqWriteClient.initialize(); + const streamType: WriteStream['type'] = managedwriter.PendingStream; + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + const row1Message = CustomerRecord.create(row1); + const serializedRow1Message: Uint8Array = + CustomerRecord.encode(row1Message).finish(); + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + const 
row2Message = CustomerRecord.create(row2); + const serializedRow2Message: Uint8Array = + CustomerRecord.encode(row2Message).finish(); + + const offset: IInt64Value['value'] = '0'; + + const streamId = await client.createWriteStream({ + streamType, + destinationTable: parent, + }); + const appendRowsResponsesResult: AppendRowsResponse[] = [ + { + appendResult: { + offset: { + value: offset, + }, + }, + writeStream: streamId, + }, + ]; + try { + const connection = await client.createStreamConnection({ + streamId, + }); + const writer = new Writer({ + connection, + protoDescriptor, + }); + const pw = writer.appendRows( + { + serializedRows: [serializedRow1Message, serializedRow2Message], + }, + offset + ); + const result = await pw.getResult(); + const responses: AppendRowsResponse[] = [ + { + appendResult: result.appendResult, + writeStream: result.writeStream, + }, + ]; + + assert.deepEqual(appendRowsResponsesResult, responses); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 2); + + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + } finally { + client.close(); + } + }); + + it('should invoke appendRows to default stream without errors', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Lovelace', + row_num: 1, + }; + const row1Message = CustomerRecord.create(row1); + const serializedRow1Message: Uint8Array = + CustomerRecord.encode(row1Message).finish(); + + // Row 2 + const row2 = { + customer_name: 'Turing', + row_num: 2, + }; + const row2Message = CustomerRecord.create(row2); + const serializedRow2Message: Uint8Array = + CustomerRecord.encode(row2Message).finish(); + + const appendRowsResponsesResult: AppendRowsResponse[] = [ + { + appendResult: { + offset: null, + }, + writeStream: parent 
+ '/streams/_default', + }, + { + appendResult: { + offset: null, + }, + writeStream: parent + '/streams/_default', + }, + ]; + try { + const connection = await client.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable: parent, + }); + const writer = new Writer({ + connection, + protoDescriptor, + }); + const pw1 = await writer.appendRows({ + serializedRows: [serializedRow1Message, serializedRow2Message], + }); + const pw2 = await writer.appendRows({ + serializedRows: [serializedRow1Message, serializedRow2Message], + }); + const results = await Promise.all([pw1.getResult(), pw2.getResult()]); + const responses: AppendRowsResponse[] = results.map(result => ({ + appendResult: result.appendResult, + writeStream: result.writeStream, + })); + + assert.deepEqual(appendRowsResponsesResult, responses); + + writer.close(); + client.close(); + } finally { + client.close(); + } + }); + + it('should invoke createWriteStream when streamType and destination table informed to createStreamConnection', async () => { + bqWriteClient.initialize(); + const streamType: WriteStream['type'] = managedwriter.PendingStream; + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Lovelace', + row_num: 1, + }; + const row1Message = CustomerRecord.create(row1); + const serializedRow1Message: Uint8Array = + CustomerRecord.encode(row1Message).finish(); + + // Row 2 + const row2 = { + customer_name: 'Turing', + row_num: 2, + }; + const row2Message = CustomerRecord.create(row2); + const serializedRow2Message: Uint8Array = + CustomerRecord.encode(row2Message).finish(); + + try { + const connection = await client.createStreamConnection({ + streamType, + destinationTable: parent, + }); + const streamId = connection.getStreamId(); + const writer = new Writer({ + connection, + protoDescriptor, + }); + const pw1 = await writer.appendRows({ + serializedRows: [serializedRow1Message, serializedRow2Message], + 
}); + const pw2 = await writer.appendRows({ + serializedRows: [serializedRow1Message, serializedRow2Message], + }); + const results = await Promise.all([pw1.getResult(), pw2.getResult()]); + const responses: AppendRowsResponse[] = results.map(result => ({ + appendResult: result.appendResult, + writeStream: result.writeStream, + })); + + const appendRowsResponsesResult: AppendRowsResponse[] = [ + { + appendResult: { + offset: { + value: '0', + }, + }, + writeStream: streamId, + }, + { + appendResult: { + offset: { + value: '2', + }, + }, + writeStream: streamId, + }, + ]; + assert.deepEqual(appendRowsResponsesResult, responses); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 4); + + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + } finally { + client.close(); + } + }); + }); + + describe('JSONWriter', () => { + it('should invoke appendRows without errors', async () => { + bqWriteClient.initialize(); + const streamType: WriteStream['type'] = managedwriter.PendingStream; + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + + const offset: IInt64Value['value'] = '0'; + + const streamId = await client.createWriteStream({ + streamType, + destinationTable: parent, + }); + const appendRowsResponsesResult: AppendRowsResponse[] = [ + { + appendResult: { + offset: { + value: offset, + }, + }, + writeStream: streamId, + }, + ]; + try { + const connection = await client.createStreamConnection({ + streamId, + }); + const writer = new 
JSONWriter({ + connection, + protoDescriptor, + }); + const pw = writer.appendRows([row1, row2], offset); + const result = await pw.getResult(); + const responses: AppendRowsResponse[] = [ + { + appendResult: result.appendResult, + writeStream: result.writeStream, + }, + ]; + + assert.deepEqual(appendRowsResponsesResult, responses); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 2); + + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + + writer.close(); + } finally { + client.close(); + } + }); + + it('should update proto descriptor automatically with appendRows without errors', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + + let receivedSchemaNotification = false; + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + connection.onSchemaUpdated(schema => { + receivedSchemaNotification = !!schema; + }); + connection.onConnectionError(err => { + throw err; + }); + + const streamId = connection.getStreamId(); + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + let offset: IInt64Value['value'] = 0; + let pw = writer.appendRows([row1, row2], offset); + let result = await pw.getResult(); + + assert.equal(result.error, null); + + const updatedSchema = { + fields: [ + ...(schema.fields || []), + { + name: 'customer_email', + type: 'STRING', + }, + ], + }; + const [md] = 
await bigquery + .dataset(datasetId) + .table(tableId) + .setMetadata({ + schema: updatedSchema, + }); + assert.deepEqual(md.schema, updatedSchema); + + // Row with new field + const rowUpdated = { + customer_name: 'Charles Babbage', + row_num: 3, + customer_email: 'charles@babbage.com', + }; + offset = 2; + + while (!result.updatedSchema) { + pw = writer.appendRows([rowUpdated], offset); + rowUpdated.row_num++; + offset++; + result = await pw.getResult(); + } + + const updatedStorageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(updatedSchema); + assert.equal( + result.updatedSchema.fields?.length, + updatedStorageSchema.fields?.length + ); + assert.equal(receivedSchemaNotification, true); + + pw = writer.appendRows([rowUpdated], offset); + offset++; + result = await pw.getResult(); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, offset); + + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + + const [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${tableId}\` order by row_num` + ); + + assert.strictEqual(rows.length, offset); + assert.deepEqual(rows[rows.length - 1], rowUpdated); + + writer.close(); + } finally { + client.close(); + } + }).timeout(30 * 1000); + }); + + describe('Error Scenarios', () => { + it('send request with mismatched proto descriptor', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + + let storageErrors: 
protos.google.cloud.bigquery.storage.v1.IStorageError[] = + []; + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + connection.onConnectionError((err: gax.GoogleError) => { + storageErrors = parseStorageErrors(err); + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + let offset: IInt64Value['value'] = 0; + let pw = writer.appendRows([row1, row2], offset); + await pw.getResult(); + + protoDescriptor.field = [ + ...(protoDescriptor.field || []), + { + name: 'customer_email', + number: 3, + type: FieldDescriptorProtoType.TYPE_STRING, + }, + ]; + writer.setProtoDescriptor(protoDescriptor); + + const row3 = { + customer_name: 'Test', + row_num: 3, + customer_email: 'test@example.com', + }; + offset = 2; + + pw = writer.appendRows([row3], offset); + try { + await pw.getResult(); + } catch (err) { + assert.notEqual(err, null); + } + + assert.equal(storageErrors.length, 1); + assert.equal( + storageErrors[0].errorMessage, + 'Schema mismatch due to extra fields in user schema' + ); + + writer.close(); + } finally { + client.close(); + } + }); + + it('send request with invalid protobuf row', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new Writer({ + connection, + protoDescriptor, + }); + + protoDescriptor.field = protoDescriptor.field?.slice(0, 1); // leave just first field + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const invalidProto = (Type as any).fromDescriptor( + protoDescriptor + ) as 
protobuf.Type; + const row = { + customer_name: 'Test', + }; + const serialized = invalidProto.encode(row).finish(); + + const pw = writer.appendRows( + { + serializedRows: [serialized], + }, + 0 + ); + const res = await pw.getResult(); + assert.notEqual(res.error, null); + assert.equal( + res.error?.message?.split('.')[0], + 'Errors found while processing rows' + ); + + writer.close(); + } finally { + client.close(); + } + }); + + it('send empty rows request should return an error', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const pw = writer.appendRows([], 0); + const res = await pw.getResult(); + assert.notEqual(res.error, null); + assert.equal( + res.error?.message?.split('.')[0], + 'Rows must be specified' + ); + + writer.close(); + } finally { + client.close(); + } + }); + + it('send large request should return an error', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new Writer({ + connection, + protoDescriptor, + }); + + const row = { + customer_name: 'Lovelace', + row_num: 1, + }; + const rowMessage = CustomerRecord.create(row); + const 
serializedRowMessage: Uint8Array = + CustomerRecord.encode(rowMessage).finish(); + + const rows: Uint8Array[] = []; + const targetSize = 11 * 1024 * 1024; // 11 MB; + const numRows = targetSize / serializedRowMessage.length; + for (let i = 0; i < numRows; i++) { + rows.push(serializedRowMessage); + } + const badPw = writer.appendRows( + { + serializedRows: rows, + }, + 0 + ); + let foundErr: Error | null = null; + try { + await badPw.getResult(); + } catch (err) { + foundErr = err as Error; + } + assert.notEqual(foundErr, null); + assert.equal( + foundErr?.message.includes('contains an invalid argument.'), + true + ); + + const goodPw = writer.appendRows( + { + serializedRows: [serializedRowMessage], + }, + 0 + ); + const res = await goodPw.getResult(); + assert.equal(res.appendResult?.offset?.value, '0'); + + writer.close(); + } finally { + client.close(); + } + }); + + xit('reconnect on idle connection', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + connection.onConnectionError(err => { + console.log('idle conn err', err); + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + let pw = writer.appendRows([row1, row2], 0); + await pw.getResult(); + + const sleep = (ms: number) => + new Promise(resolve => { + setTimeout(resolve, ms); + }); + const minutes = 10; + for (let i = 0; i <= minutes; i++) { + console.log('sleeping for a minute: ', minutes - i, 'to go'); + await sleep(60 * 1000); + } + + const row3 = { + customer_name: 'Test', + row_num: 3, + customer_email: 'test@example.com', + }; + + pw = writer.appendRows([row3], 2); + await pw.getResult(); + + 
const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 3); + + writer.close(); + } finally { + client.close(); + } + }).timeout(20 * 60 * 1000); + }); + + describe('close', () => { + it('should invoke close without errors', async () => { + bqWriteClient.initialize(); + const streamType: WriteStream['type'] = managedwriter.PendingStream; + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Lovelace', + row_num: 1, + }; + const row1Message = CustomerRecord.create(row1); + const serializedRow1Message: Uint8Array = + CustomerRecord.encode(row1Message).finish(); + + // Row 2 + const row2 = { + customer_name: 'Turing', + row_num: 2, + }; + const row2Message = CustomerRecord.create(row2); + const serializedRow2Message: Uint8Array = + CustomerRecord.encode(row2Message).finish(); + + const offset = 0; + try { + const streamId = await client.createWriteStream({ + streamType, + destinationTable: parent, + }); + const connection = await client.createStreamConnection({streamId}); + const writer = new Writer({ + connection, + protoDescriptor, + }); + const pw = writer.appendRows( + { + serializedRows: [serializedRow1Message, serializedRow2Message], + }, + offset + ); + await pw.getResult(); + + writer.close(); + client.close(); + assert.strictEqual(client.isOpen(), false); + } finally { + client.close(); + } + }); + }); + + // Only delete a resource if it is older than 24 hours. That will prevent + // collisions with parallel CI test runs. 
+ function isResourceStale(creationTime: number) { + const oneDayMs = 86400000; + const now = new Date(); + const created = new Date(creationTime); + return now.getTime() - created.getTime() >= oneDayMs; + } + + async function deleteDatasets() { + let [datasets] = await bigquery.getDatasets(); + datasets = datasets.filter( + dataset => dataset.id?.includes(GCLOUD_TESTS_PREFIX) + ); + + for (const dataset of datasets) { + const [metadata] = await dataset.getMetadata(); + const creationTime = Number(metadata.creationTime); + + if (isResourceStale(creationTime)) { + try { + await dataset.delete({force: true}); + } catch (e) { + console.log(`dataset(${dataset.id}).delete() failed`); + console.log(e); + } + } + } + } +}); diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json index c78f1c884ef..b4fd9192913 100644 --- a/handwritten/bigquery-storage/tsconfig.json +++ b/handwritten/bigquery-storage/tsconfig.json @@ -7,7 +7,8 @@ "lib": [ "es2018", "dom" - ] + ], + "module": "CommonJS" }, "include": [ "src/*.ts", From 261dc5757e7a07eec7aee538ec1844751ac93862 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Aug 2023 17:15:45 -0400 Subject: [PATCH 229/333] docs: fix node release schedule link (#359) Co-authored-by: Jeffrey Rennie Source-Link: https://github.com/googleapis/synthtool/commit/1a2431537d603e95b4b32317fb494542f75a2165 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:e08f9a3757808cdaf7a377e962308c65c4d7eff12db206d4fae702dd50d43430 Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/README.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 0c47c8b71d4..a3d003c65a1 100644 --- 
a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:3ad01f4c6671efb094b43f7d3a3e0b9510bd6501f2e65e874dd525373e29de75 -# created: 2023-08-01T22:29:52.50398591Z + digest: sha256:e08f9a3757808cdaf7a377e962308c65c4d7eff12db206d4fae702dd50d43430 +# created: 2023-08-03T18:46:14.719706948Z diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index c2502aa1d6c..220dc7e4183 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -200,7 +200,7 @@ also contains samples. ## Supported Node.js Versions -Our client libraries follow the [Node.js release schedule](https://nodejs.org/en/about/releases/). +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). Libraries are compatible with all current _active_ and _maintenance_ versions of Node.js. 
If you are using an end-of-life version of Node.js, we recommend that you update From 159907c87c711812c7dce44ad6417ce7fe5bfaa3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Aug 2023 16:28:10 +0200 Subject: [PATCH 230/333] chore(deps): update dependency @google-cloud/bigquery to v7 (#361) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 04fedc55985..d090af9a88c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -30,7 +30,7 @@ "google-gax": "^4.0.3" }, "devDependencies": { - "@google-cloud/bigquery": "^6.1.0", + "@google-cloud/bigquery": "^7.0.0", "@types/uuid": "^9.0.1", "@types/mocha": "^9.0.0", "@types/node": "^18.0.0", From f38d9a9320ff73e825dca3f0aee2311a5b94de5e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Aug 2023 16:38:02 +0200 Subject: [PATCH 231/333] chore(deps): update dependency jsdoc-region-tag to v3 (#363) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d090af9a88c..344997dbbea 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -40,7 +40,7 @@ "gts": "^5.0.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", + "jsdoc-region-tag": "^3.0.0", "linkinator": "^5.0.0", "mocha": "^9.2.2", "null-loader": "^4.0.0", From 97f29a9fb45ab9c7a88e5676f05c2a4e9e388570 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Aug 2023 17:41:17 +0200 Subject: [PATCH 232/333] chore(deps): update dependency jsdoc-fresh to v3 (#362) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json 
b/handwritten/bigquery-storage/package.json index 344997dbbea..d3b5b3da70a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -39,7 +39,7 @@ "gapic-tools": "^0.1.8", "gts": "^5.0.0", "jsdoc": "^4.0.0", - "jsdoc-fresh": "^2.0.0", + "jsdoc-fresh": "^3.0.0", "jsdoc-region-tag": "^3.0.0", "linkinator": "^5.0.0", "mocha": "^9.2.2", From 04c86871abe6fe21204501dae7b0f4c4d742b47e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 09:19:32 -0400 Subject: [PATCH 233/333] chore(main): release 4.1.0 (#360) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index ddf45e00427..61d2585abf7 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.0.0...v4.1.0) (2023-08-11) + + +### Features + +* Storage write api veneer ([#328](https://github.com/googleapis/nodejs-bigquery-storage/issues/328)) ([146141e](https://github.com/googleapis/nodejs-bigquery-storage/commit/146141e7973c9e6642174d34b284be605e90582b)) + ## [4.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v3.4.0...v4.0.0) (2023-08-01) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d3b5b3da70a..6772cd5b9ba 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.0.0", + "version": "4.1.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 26de33d3b16..dc7e6a04e83 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.0.0", + "version": "4.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 9b3533d6c6a..177857f71b8 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.0.0", + "version": "4.1.0", "language": "TYPESCRIPT", "apis": [ { From 6619886578dad45247600d4d58b3ae5386f05a4b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 14:18:39 -0700 Subject: [PATCH 234/333] feat: add default_missing_value_interpretation field; indicate KMS_SERVICE_ERROR is retryable (#347) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: disable 
retry-request for streaming tests PiperOrigin-RevId: 554648220 Source-Link: https://github.com/googleapis/googleapis/commit/53cd9ad1b48e40cdd44e0c13e96ac0281b32828f Source-Link: https://github.com/googleapis/googleapis-gen/commit/7e8867efbed7dbfe5ef6ec3c2c92a4bce4280f7a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2U4ODY3ZWZiZWQ3ZGJmZTVlZjZlYzNjMmM5MmE0YmNlNDI4MGY3YSJ9 fix: fix typings for IAM methods docs: fixed links in the generated Markdown documentation PiperOrigin-RevId: 551610576 Source-Link: https://github.com/googleapis/googleapis/commit/73b1313cbd1fd0cc1e22684bc89ee1b1a416cfe0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8bec066492a6da2855b1b8ce562664c0a6b30b01 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGJlYzA2NjQ5MmE2ZGEyODU1YjFiOGNlNTYyNjY0YzBhNmIzMGIwMSJ9 feat: add ResourceExhausted to retryable error for Write API unary calls docs: add multiplexing documentation PiperOrigin-RevId: 545839491 Source-Link: https://github.com/googleapis/googleapis/commit/2b006afc7a392006602ce0868c22341b5aeef4a8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0d52d385bd4e78c7b2c83755013fe103e804c384 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGQ1MmQzODViZDRlNzhjN2IyYzgzNzU1MDEzZmUxMDNlODA0YzM4NCJ9 feat: add estimated physical file sizes to ReadAPI v1 PiperOrigin-RevId: 542350532 Source-Link: https://github.com/googleapis/googleapis/commit/a4ff1c210c20efa3b81ecd3912936f96fcd0d708 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b5b5fe5dcd4bce15b7b9035b925452ee7caf489b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjViNWZlNWRjZDRiY2UxNWI3YjkwMzViOTI1NDUyZWU3Y2FmNDg5YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add beta2 deprecation message PiperOrigin-RevId: 556875976 Source-Link: https://github.com/googleapis/googleapis/commit/29b8ec7ad787cdbe18ed5f0ea19a62c8955d83e8 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/cc401b7cac5c77c681056c788ab1c8e2025e7ba6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2M0MDFiN2NhYzVjNzdjNjgxMDU2Yzc4OGFiMWM4ZTIwMjVlN2JhNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add default_missing_value_interpretation field; indicate KMS_SERVICE_ERROR is retryable PiperOrigin-RevId: 556931084 Source-Link: https://github.com/googleapis/googleapis/commit/ca3d7e8f35834688b137a4ee9915b4521477876e Source-Link: https://github.com/googleapis/googleapis-gen/commit/861a1c34624a57251345c820d69887a495ee417a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODYxYTFjMzQ2MjRhNTcyNTEzNDVjODIwZDY5ODg3YTQ5NWVlNDE3YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> --- .../cloud/bigquery/storage/v1/storage.proto | 68 ++- .../cloud/bigquery/storage/v1/stream.proto | 18 +- .../bigquery-storage/protos/protos.d.ts | 12 + handwritten/bigquery-storage/protos/protos.js | 84 ++++ .../bigquery-storage/protos/protos.json | 14 + .../v1/big_query_write.append_rows.js | 33 +- ..._query_write.batch_commit_write_streams.js | 2 +- ...data_google.cloud.bigquery.storage.v1.json | 431 ++++++++++++++++++ ...google.cloud.bigquery.storage.v1beta1.json | 247 ++++++++++ .../src/v1/big_query_read_client.ts | 15 +- .../src/v1/big_query_write_client.ts | 32 +- .../src/v1/big_query_write_client_config.json | 8 +- .../src/v1beta1/big_query_storage_client.ts | 25 +- .../bigquery-storage/system-test/install.ts | 16 +- .../test/gapic_big_query_read_v1.ts | 4 +- .../test/gapic_big_query_storage_v1beta1.ts | 4 +- 
handwritten/bigquery-storage/tsconfig.json | 3 +- 17 files changed, 925 insertions(+), 91 deletions(-) create mode 100644 handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index d28c36f43f4..187bf549dac 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -397,9 +397,10 @@ message CreateWriteStreamRequest { // Request message for `AppendRows`. // -// Due to the nature of AppendRows being a bidirectional streaming RPC, certain -// parts of the AppendRowsRequest need only be specified for the first request -// sent each time the gRPC network connection is opened/reopened. +// Because AppendRows is a bidirectional streaming RPC, certain parts of the +// AppendRowsRequest need only be specified for the first request before +// switching table destinations. You can also switch table destinations within +// the same connection for the default stream. // // The size of a single AppendRowsRequest must be less than 10 MB in size. // Requests larger than this return an error, typically `INVALID_ARGUMENT`. @@ -407,9 +408,14 @@ message AppendRowsRequest { // ProtoData contains the data rows and schema when constructing append // requests. message ProtoData { - // Proto schema used to serialize the data. This value only needs to be - // provided as part of the first request on a gRPC network connection, - // and will be ignored for subsequent requests on the connection. + // The protocol buffer schema used to serialize the data. 
Provide this value + // whenever: + // + // * You send the first request of an RPC connection. + // + // * You change the input schema. + // + // * You specify a new destination table. ProtoSchema writer_schema = 1; // Serialized row data in protobuf message format. @@ -419,10 +425,9 @@ message AppendRowsRequest { ProtoRows rows = 2; } - // An enum to indicate how to interpret missing values. Missing values are - // fields present in user schema but missing in rows. A missing value can - // represent a NULL or a column default value defined in BigQuery table - // schema. + // An enum to indicate how to interpret missing values of fields that are + // present in user schema but missing in rows. A missing value can represent a + // NULL or a column default value defined in BigQuery table schema. enum MissingValueInterpretation { // Invalid missing value interpretation. Requests with this value will be // rejected. @@ -436,10 +441,14 @@ message AppendRowsRequest { DEFAULT_VALUE = 2; } - // Required. The write_stream identifies the target of the append operation, - // and only needs to be specified as part of the first request on the gRPC - // connection. If provided for subsequent requests, it must match the value of - // the first request. + // Required. The write_stream identifies the append operation. It must be + // provided in the following scenarios: + // + // * In the first request to an AppendRows connection. + // + // * In all subsequent requests to an AppendRows connection, if you use the + // same connection to write to multiple tables or change the input schema for + // default streams. // // For explicitly created write streams, the format is: // @@ -448,6 +457,22 @@ message AppendRowsRequest { // For the special default stream, the format is: // // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. 
+ // + // An example of a possible sequence of requests with write_stream fields + // within a single connection: + // + // * r1: {write_stream: stream_name_1} + // + // * r2: {write_stream: /*omit*/} + // + // * r3: {write_stream: /*omit*/} + // + // * r4: {write_stream: stream_name_2} + // + // * r5: {write_stream: stream_name_2} + // + // The destination changed in request_4, so the write_stream field must be + // populated in all subsequent requests in this stream. string write_stream = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -493,6 +518,18 @@ message AppendRowsRequest { // Currently, field name can only be top-level column name, can't be a struct // field path like 'foo.bar'. map missing_value_interpretations = 7; + + // Optional. Default missing value interpretation for all columns in the + // table. When a value is specified on an `AppendRowsRequest`, it is applied + // to all requests on the connection from that point forward, until a + // subsequent `AppendRowsRequest` sets it to a different value. + // `missing_value_interpretation` can override + // `default_missing_value_interpretation`. For example, if you want to write + // `NULL` instead of using default values for some columns, you can set + // `default_missing_value_interpretation` to `DEFAULT_VALUE` and at the same + // time, set `missing_value_interpretations` to `NULL_VALUE` on those columns. + MissingValueInterpretation default_missing_value_interpretation = 8 + [(google.api.field_behavior) = OPTIONAL]; } // Response message for `AppendRows`. @@ -680,7 +717,8 @@ message StorageError { // There is an encryption error while using customer-managed encryption key. CMEK_ENCRYPTION_ERROR = 12; - // Key Management Service (KMS) service returned an error. + // Key Management Service (KMS) service returned an error, which can be + // retried. KMS_SERVICE_ERROR = 13; // Permission denied while using customer-managed encryption key. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index e72932e187d..785c74f788d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -131,11 +131,11 @@ message ReadSession { } // Optional. Specifies a table sampling percentage. Specifically, the query - // planner will use TABLESAMPLE SYSTEM (sample_percentage PERCENT). This - // samples at the file-level. It will randomly choose for each file whether - // to include that file in the sample returned. Note, that if the table only - // has one file, then TABLESAMPLE SYSTEM will select that file and return - // all returnable rows contained within. + // planner will use TABLESAMPLE SYSTEM (sample_percentage PERCENT). The + // sampling percentage is applied at the data block granularity. It will + // randomly choose for each data block whether to read the rows in that data + // block. For more details, see + // https://cloud.google.com/bigquery/docs/table-sampling) optional double sample_percentage = 5 [(google.api.field_behavior) = OPTIONAL]; } @@ -194,6 +194,14 @@ message ReadSession { int64 estimated_total_bytes_scanned = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. A pre-projected estimate of the total physical size of files + // (in bytes) that this session will scan when all streams are consumed. This + // estimate is independent of the selected columns and can be based on + // incomplete or stale metadata from the table. This field is only set for + // BigLake tables. + int64 estimated_total_physical_file_size = 15 + [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. An estimate on the number of rows present in this session's // streams. 
This estimate is based on metadata from the table which might be // incomplete or stale. diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 085f8586882..04a3856b455 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -2047,6 +2047,9 @@ export namespace google { /** AppendRowsRequest missingValueInterpretations */ missingValueInterpretations?: ({ [k: string]: google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation }|null); + + /** AppendRowsRequest defaultMissingValueInterpretation */ + defaultMissingValueInterpretation?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|keyof typeof google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|null); } /** Represents an AppendRowsRequest. */ @@ -2073,6 +2076,9 @@ export namespace google { /** AppendRowsRequest missingValueInterpretations. */ public missingValueInterpretations: { [k: string]: google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation }; + /** AppendRowsRequest defaultMissingValueInterpretation. */ + public defaultMissingValueInterpretation: (google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|keyof typeof google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation); + /** AppendRowsRequest rows. */ public rows?: "protoRows"; @@ -3483,6 +3489,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned */ estimatedTotalBytesScanned?: (number|Long|string|null); + /** ReadSession estimatedTotalPhysicalFileSize */ + estimatedTotalPhysicalFileSize?: (number|Long|string|null); + /** ReadSession estimatedRowCount */ estimatedRowCount?: (number|Long|string|null); @@ -3529,6 +3538,9 @@ export namespace google { /** ReadSession estimatedTotalBytesScanned. 
*/ public estimatedTotalBytesScanned: (number|Long|string); + /** ReadSession estimatedTotalPhysicalFileSize. */ + public estimatedTotalPhysicalFileSize: (number|Long|string); + /** ReadSession estimatedRowCount. */ public estimatedRowCount: (number|Long|string); diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 903e738c0a8..4017976d4fc 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -4533,6 +4533,7 @@ * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows * @property {string|null} [traceId] AppendRowsRequest traceId * @property {Object.|null} [missingValueInterpretations] AppendRowsRequest missingValueInterpretations + * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|null} [defaultMissingValueInterpretation] AppendRowsRequest defaultMissingValueInterpretation */ /** @@ -4591,6 +4592,14 @@ */ AppendRowsRequest.prototype.missingValueInterpretations = $util.emptyObject; + /** + * AppendRowsRequest defaultMissingValueInterpretation. 
+ * @member {google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation} defaultMissingValueInterpretation + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.defaultMissingValueInterpretation = 0; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -4640,6 +4649,8 @@ if (message.missingValueInterpretations != null && Object.hasOwnProperty.call(message, "missingValueInterpretations")) for (var keys = Object.keys(message.missingValueInterpretations), i = 0; i < keys.length; ++i) writer.uint32(/* id 7, wireType 2 =*/58).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 0 =*/16).int32(message.missingValueInterpretations[keys[i]]).ldelim(); + if (message.defaultMissingValueInterpretation != null && Object.hasOwnProperty.call(message, "defaultMissingValueInterpretation")) + writer.uint32(/* id 8, wireType 0 =*/64).int32(message.defaultMissingValueInterpretation); return writer; }; @@ -4713,6 +4724,10 @@ message.missingValueInterpretations[key] = value; break; } + case 8: { + message.defaultMissingValueInterpretation = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -4782,6 +4797,15 @@ break; } } + if (message.defaultMissingValueInterpretation != null && message.hasOwnProperty("defaultMissingValueInterpretation")) + switch (message.defaultMissingValueInterpretation) { + default: + return "defaultMissingValueInterpretation: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -4837,6 +4861,26 @@ break; } } + switch (object.defaultMissingValueInterpretation) { + default: + if (typeof object.defaultMissingValueInterpretation === "number") { + message.defaultMissingValueInterpretation = object.defaultMissingValueInterpretation; + break; + } + break; + case "MISSING_VALUE_INTERPRETATION_UNSPECIFIED": + case 0: + message.defaultMissingValueInterpretation = 0; + break; + case 
"NULL_VALUE": + case 1: + message.defaultMissingValueInterpretation = 1; + break; + case "DEFAULT_VALUE": + case 2: + message.defaultMissingValueInterpretation = 2; + break; + } return message; }; @@ -4859,6 +4903,7 @@ object.writeStream = ""; object.offset = null; object.traceId = ""; + object.defaultMissingValueInterpretation = options.enums === String ? "MISSING_VALUE_INTERPRETATION_UNSPECIFIED" : 0; } if (message.writeStream != null && message.hasOwnProperty("writeStream")) object.writeStream = message.writeStream; @@ -4877,6 +4922,8 @@ for (var j = 0; j < keys2.length; ++j) object.missingValueInterpretations[keys2[j]] = options.enums === String ? $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.missingValueInterpretations[keys2[j]]] === undefined ? message.missingValueInterpretations[keys2[j]] : $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.missingValueInterpretations[keys2[j]]] : message.missingValueInterpretations[keys2[j]]; } + if (message.defaultMissingValueInterpretation != null && message.hasOwnProperty("defaultMissingValueInterpretation")) + object.defaultMissingValueInterpretation = options.enums === String ? $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.defaultMissingValueInterpretation] === undefined ? 
message.defaultMissingValueInterpretation : $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation[message.defaultMissingValueInterpretation] : message.defaultMissingValueInterpretation; return object; }; @@ -8049,6 +8096,7 @@ * @property {google.cloud.bigquery.storage.v1.ReadSession.ITableReadOptions|null} [readOptions] ReadSession readOptions * @property {Array.|null} [streams] ReadSession streams * @property {number|Long|null} [estimatedTotalBytesScanned] ReadSession estimatedTotalBytesScanned + * @property {number|Long|null} [estimatedTotalPhysicalFileSize] ReadSession estimatedTotalPhysicalFileSize * @property {number|Long|null} [estimatedRowCount] ReadSession estimatedRowCount * @property {string|null} [traceId] ReadSession traceId */ @@ -8149,6 +8197,14 @@ */ ReadSession.prototype.estimatedTotalBytesScanned = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + /** + * ReadSession estimatedTotalPhysicalFileSize. + * @member {number|Long} estimatedTotalPhysicalFileSize + * @memberof google.cloud.bigquery.storage.v1.ReadSession + * @instance + */ + ReadSession.prototype.estimatedTotalPhysicalFileSize = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + /** * ReadSession estimatedRowCount. 
* @member {number|Long} estimatedRowCount @@ -8228,6 +8284,8 @@ writer.uint32(/* id 13, wireType 2 =*/106).string(message.traceId); if (message.estimatedRowCount != null && Object.hasOwnProperty.call(message, "estimatedRowCount")) writer.uint32(/* id 14, wireType 0 =*/112).int64(message.estimatedRowCount); + if (message.estimatedTotalPhysicalFileSize != null && Object.hasOwnProperty.call(message, "estimatedTotalPhysicalFileSize")) + writer.uint32(/* id 15, wireType 0 =*/120).int64(message.estimatedTotalPhysicalFileSize); return writer; }; @@ -8304,6 +8362,10 @@ message.estimatedTotalBytesScanned = reader.int64(); break; } + case 15: { + message.estimatedTotalPhysicalFileSize = reader.int64(); + break; + } case 14: { message.estimatedRowCount = reader.int64(); break; @@ -8408,6 +8470,9 @@ if (message.estimatedTotalBytesScanned != null && message.hasOwnProperty("estimatedTotalBytesScanned")) if (!$util.isInteger(message.estimatedTotalBytesScanned) && !(message.estimatedTotalBytesScanned && $util.isInteger(message.estimatedTotalBytesScanned.low) && $util.isInteger(message.estimatedTotalBytesScanned.high))) return "estimatedTotalBytesScanned: integer|Long expected"; + if (message.estimatedTotalPhysicalFileSize != null && message.hasOwnProperty("estimatedTotalPhysicalFileSize")) + if (!$util.isInteger(message.estimatedTotalPhysicalFileSize) && !(message.estimatedTotalPhysicalFileSize && $util.isInteger(message.estimatedTotalPhysicalFileSize.low) && $util.isInteger(message.estimatedTotalPhysicalFileSize.high))) + return "estimatedTotalPhysicalFileSize: integer|Long expected"; if (message.estimatedRowCount != null && message.hasOwnProperty("estimatedRowCount")) if (!$util.isInteger(message.estimatedRowCount) && !(message.estimatedRowCount && $util.isInteger(message.estimatedRowCount.low) && $util.isInteger(message.estimatedRowCount.high))) return "estimatedRowCount: integer|Long expected"; @@ -8497,6 +8562,15 @@ message.estimatedTotalBytesScanned = 
object.estimatedTotalBytesScanned; else if (typeof object.estimatedTotalBytesScanned === "object") message.estimatedTotalBytesScanned = new $util.LongBits(object.estimatedTotalBytesScanned.low >>> 0, object.estimatedTotalBytesScanned.high >>> 0).toNumber(); + if (object.estimatedTotalPhysicalFileSize != null) + if ($util.Long) + (message.estimatedTotalPhysicalFileSize = $util.Long.fromValue(object.estimatedTotalPhysicalFileSize)).unsigned = false; + else if (typeof object.estimatedTotalPhysicalFileSize === "string") + message.estimatedTotalPhysicalFileSize = parseInt(object.estimatedTotalPhysicalFileSize, 10); + else if (typeof object.estimatedTotalPhysicalFileSize === "number") + message.estimatedTotalPhysicalFileSize = object.estimatedTotalPhysicalFileSize; + else if (typeof object.estimatedTotalPhysicalFileSize === "object") + message.estimatedTotalPhysicalFileSize = new $util.LongBits(object.estimatedTotalPhysicalFileSize.low >>> 0, object.estimatedTotalPhysicalFileSize.high >>> 0).toNumber(); if (object.estimatedRowCount != null) if ($util.Long) (message.estimatedRowCount = $util.Long.fromValue(object.estimatedRowCount)).unsigned = false; @@ -8544,6 +8618,11 @@ object.estimatedRowCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; } else object.estimatedRowCount = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.estimatedTotalPhysicalFileSize = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.estimatedTotalPhysicalFileSize = options.longs === String ? "0" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -8584,6 +8663,11 @@ object.estimatedRowCount = options.longs === String ? String(message.estimatedRowCount) : message.estimatedRowCount; else object.estimatedRowCount = options.longs === String ? 
$util.Long.prototype.toString.call(message.estimatedRowCount) : options.longs === Number ? new $util.LongBits(message.estimatedRowCount.low >>> 0, message.estimatedRowCount.high >>> 0).toNumber() : message.estimatedRowCount; + if (message.estimatedTotalPhysicalFileSize != null && message.hasOwnProperty("estimatedTotalPhysicalFileSize")) + if (typeof message.estimatedTotalPhysicalFileSize === "number") + object.estimatedTotalPhysicalFileSize = options.longs === String ? String(message.estimatedTotalPhysicalFileSize) : message.estimatedTotalPhysicalFileSize; + else + object.estimatedTotalPhysicalFileSize = options.longs === String ? $util.Long.prototype.toString.call(message.estimatedTotalPhysicalFileSize) : options.longs === Number ? new $util.LongBits(message.estimatedTotalPhysicalFileSize.low >>> 0, message.estimatedTotalPhysicalFileSize.high >>> 0).toNumber() : message.estimatedTotalPhysicalFileSize; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 2abd4ae3de3..00f25be32f8 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -515,6 +515,13 @@ "keyType": "string", "type": "MissingValueInterpretation", "id": 7 + }, + "defaultMissingValueInterpretation": { + "type": "MissingValueInterpretation", + "id": 8, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } }, "nested": { @@ -829,6 +836,13 @@ "(google.api.field_behavior)": "OUTPUT_ONLY" } }, + "estimatedTotalPhysicalFileSize": { + "type": "int64", + "id": 15, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, "estimatedRowCount": { "type": "int64", "id": 14, diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index c121363cad9..30c28eabeeb 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -29,14 +29,25 @@ function main(writeStream) { * TODO(developer): Uncomment these variables before running the sample. */ /** - * Required. The write_stream identifies the target of the append operation, - * and only needs to be specified as part of the first request on the gRPC - * connection. If provided for subsequent requests, it must match the value of - * the first request. + * Required. The write_stream identifies the append operation. It must be + * provided in the following scenarios: + * * In the first request to an AppendRows connection. + * * In all subsequent requests to an AppendRows connection, if you use the + * same connection to write to multiple tables or change the input schema for + * default streams. * For explicitly created write streams, the format is: * * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}` * For the special default stream, the format is: * * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`. + * An example of a possible sequence of requests with write_stream fields + * within a single connection: + * * r1: {write_stream: stream_name_1} + * * r2: {write_stream: /*omit* /} + * * r3: {write_stream: /*omit* /} + * * r4: {write_stream: stream_name_2} + * * r5: {write_stream: stream_name_2} + * The destination changed in request_4, so the write_stream field must be + * populated in all subsequent requests in this stream. */ // const writeStream = 'abc123' /** @@ -71,7 +82,19 @@ function main(writeStream) { * Currently, field name can only be top-level column name, can't be a struct * field path like 'foo.bar'. */ - // const missingValueInterpretations = 1234 + // const missingValueInterpretations = [1,2,3,4] + /** + * Optional. Default missing value interpretation for all columns in the + * table. 
When a value is specified on an `AppendRowsRequest`, it is applied + * to all requests on the connection from that point forward, until a + * subsequent `AppendRowsRequest` sets it to a different value. + * `missing_value_interpretation` can override + * `default_missing_value_interpretation`. For example, if you want to write + * `NULL` instead of using default values for some columns, you can set + * `default_missing_value_interpretation` to `DEFAULT_VALUE` and at the same + * time, set `missing_value_interpretations` to `NULL_VALUE` on those columns. + */ + // const defaultMissingValueInterpretation = {} // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index 78ce9fa71fb..fe982cecb2e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -36,7 +36,7 @@ function main(parent, writeStreams) { /** * Required. The group of streams that will be committed atomically. 
*/ - // const writeStreams = 'abc123' + // const writeStreams = ['abc','def'] // Imports the Storage library const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1; diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json new file mode 100644 index 00000000000..e45697aeb8e --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -0,0 +1,431 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "4.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1", + "version": "v1" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", + "title": "BigQueryRead createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Data is assigned to each stream such that roughly the same number of rows can be read from each stream. Because the server-side unit for assigning data is collections of rows, the API does not guarantee that each stream will return the same number or rows. Additionally, the limits are enforced based on the number of pre-filtered rows, so some filters can lead to lopsided assignments. 
Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_read.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 81, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_session", + "type": ".google.cloud.bigquery.storage.v1.ReadSession" + }, + { + "name": "max_stream_count", + "type": "TYPE_INT32" + }, + { + "name": "preferred_min_stream_count", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadSession", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", + "title": "BigQueryRead readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to read individual rows larger than 100 MiB will fail. 
Each request also returns a set of stream statistics reflecting the current state of the stream.", + "canonical": true, + "file": "big_query_read.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": "TYPE_INT64" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.ReadRowsResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", + "title": "BigQueryRead splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are referred to as the primary and the residual streams of the split. The original `ReadStream` can still be read from in the same manner as before. Both of the returned `ReadStream` objects can also be read from, and the rows returned by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back-to-back in the original `ReadStream`. 
Concretely, it is guaranteed that for streams original, primary, and residual, that original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read to completion.", + "canonical": true, + "file": "big_query_read.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 63, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "fraction", + "type": "TYPE_DOUBLE" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryReadClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryReadClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "service": { + "shortName": "BigQueryRead", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", + "title": "BigQueryRead createWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Creates a write stream to the given table. Additionally, every table has a special stream named '_default' to which data can be written. This stream doesn't need to be created using CreateWriteStream. It is a stream that can be used simultaneously by any number of clients. 
Data written to this stream is considered committed as soon as an acknowledgement is received.", + "canonical": true, + "file": "big_query_write.create_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_stream", + "type": ".google.cloud.bigquery.storage.v1.WriteStream" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "CreateWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", + "title": "BigQueryRead appendRows Sample", + "origin": "API_DEFINITION", + "description": " Appends data to the given stream. If `offset` is specified, the `offset` is checked against the end of stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an attempt is made to append to an offset beyond the current end of the stream or `ALREADY_EXISTS` if user provides an `offset` that has already been written to. User can retry with adjusted offset within the same RPC connection. If `offset` is not specified, append happens at the end of the stream. The response contains an optional offset at which the append happened. No offset information will be returned for appends to a default stream. Responses are received in the same order in which requests are sent. There will be one response for each successful inserted request. 
Responses may optionally embed error information if the originating AppendRequest was not successfully processed. The specifics of when successfully appended data is made visible to the table are governed by the type of stream: * For COMMITTED streams (which includes the default stream), data is visible immediately upon successful append. * For BUFFERED streams, data is made visible via a subsequent `FlushRows` rpc which advances a cursor to a newer offset in the stream. * For PENDING streams, data is not made visible until the stream itself is finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly committed via the `BatchCommitWriteStreams` rpc.", + "canonical": true, + "file": "big_query_write.append_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 120, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + }, + { + "name": "proto_rows", + "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + }, + { + "name": "missing_value_interpretations", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "default_missing_value_interpretation", + "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.AppendRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "AppendRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": 
"bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", + "title": "BigQueryRead getWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Gets information about a write stream.", + "canonical": true, + "file": "big_query_write.get_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "view", + "type": ".google.cloud.bigquery.storage.v1.WriteStreamView" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.WriteStream", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "GetWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", + "title": "BigQueryRead finalizeWriteStream Sample", + "origin": "API_DEFINITION", + "description": " Finalize a write stream so that no new data can be appended to the stream. 
Finalize is not supported on the '_default' stream.", + "canonical": true, + "file": "big_query_write.finalize_write_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FinalizeWriteStream", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", + "title": "BigQueryRead batchCommitWriteStreams Sample", + "origin": "API_DEFINITION", + "description": " Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams must be finalized before commit and cannot be committed multiple times. 
Once a stream is committed, data in the stream becomes available for read operations.", + "canonical": true, + "file": "big_query_write.batch_commit_write_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "write_streams", + "type": "TYPE_STRING[]" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "BatchCommitWriteStreams", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", + "title": "BigQueryRead flushRows Sample", + "origin": "API_DEFINITION", + "description": " Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush operation is required in order for the rows to become available for reading. A Flush operation flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in the request. 
Flush is not supported on the _default stream, since it is not BUFFERED.", + "canonical": true, + "file": "big_query_write.flush_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "async": true, + "parameters": [ + { + "name": "write_stream", + "type": "TYPE_STRING" + }, + { + "name": "offset", + "type": ".google.protobuf.Int64Value" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1.FlushRowsResponse", + "client": { + "shortName": "BigQueryWriteClient", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWriteClient" + }, + "method": { + "shortName": "FlushRows", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "service": { + "shortName": "BigQueryWrite", + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite" + } + } + } + } + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json new file mode 100644 index 00000000000..177857f71b8 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -0,0 +1,247 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "4.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta1", + "version": "v1beta1" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async", + "title": "BigQueryStorage createReadSession Sample", + "origin": "API_DEFINITION", + "description": " Creates a new read session. 
A read session divides the contents of a BigQuery table into one or more streams, which can then be used to read data from the table. The read session also specifies properties of the data to be read, such as a list of columns or a push-down filter describing the rows to be returned. A particular row can be read by at most one stream. When the caller has reached the end of each stream in the session, then all the data in the table has been read. Read sessions automatically expire 6 hours after they are created and do not require manual clean-up by the caller.", + "canonical": true, + "file": "big_query_storage.create_read_session.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 87, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "async": true, + "parameters": [ + { + "name": "table_reference", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReference" + }, + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "table_modifiers", + "type": ".google.cloud.bigquery.storage.v1beta1.TableModifiers" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + }, + { + "name": "read_options", + "type": ".google.cloud.bigquery.storage.v1beta1.TableReadOptions" + }, + { + "name": "format", + "type": ".google.cloud.bigquery.storage.v1beta1.DataFormat" + }, + { + "name": "sharding_strategy", + "type": ".google.cloud.bigquery.storage.v1beta1.ShardingStrategy" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadSession", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "CreateReadSession", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.CreateReadSession", + "service": { + "shortName": "BigQueryStorage", + "fullName": 
"google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async", + "title": "BigQueryStorage readRows Sample", + "origin": "API_DEFINITION", + "description": " Reads rows from the table in the format prescribed by the read session. Each response contains one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to read individual rows larger than this will fail. Each request also returns a set of stream statistics reflecting the estimated total number of rows in the read stream. This number is computed based on the total table size and the number of active streams in the read session, and may change as other streams continue to read data.", + "canonical": true, + "file": "big_query_storage.read_rows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 57, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "async": true, + "parameters": [ + { + "name": "read_position", + "type": ".google.cloud.bigquery.storage.v1beta1.StreamPosition" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.ReadRowsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "ReadRows", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.ReadRows", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async", + "title": "BigQueryStorage batchCreateReadSessionStreams Sample", + "origin": "API_DEFINITION", + "description": " Creates additional streams for a ReadSession. 
This API can be used to dynamically adjust the parallelism of a batch processing task upwards by adding additional workers.", + "canonical": true, + "file": "big_query_storage.batch_create_read_session_streams.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "async": true, + "parameters": [ + { + "name": "session", + "type": ".google.cloud.bigquery.storage.v1beta1.ReadSession" + }, + { + "name": "requested_streams", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "BatchCreateReadSessionStreams", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.BatchCreateReadSessionStreams", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async", + "title": "BigQueryStorage finalizeStream Sample", + "origin": "API_DEFINITION", + "description": " Causes a single stream in a ReadSession to gracefully stop. This API can be used to dynamically adjust the parallelism of a batch processing task downwards without losing data. This API does not delete the stream -- it remains visible in the ReadSession, and any data processed by the stream is not released to other streams. However, no additional data will be assigned to the stream once this call completes. Callers must continue reading data on the stream until the end of the stream is reached so that data which has already been assigned to the stream will be processed. 
This method will return an error if there are no other live streams in the Session, or if SplitReadStream() has been called on the given Stream.", + "canonical": true, + "file": "big_query_storage.finalize_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 53, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "async": true, + "parameters": [ + { + "name": "stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "FinalizeStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.FinalizeStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async", + "title": "BigQueryStorage splitReadStream Sample", + "origin": "API_DEFINITION", + "description": " Splits a given read stream into two Streams. These streams are referred to as the primary and the residual of the split. The original stream can still be read from in the same manner as before. Both of the returned streams can also be read from, and the total rows return by both child streams will be the same as the rows read from the original stream. Moreover, the two child streams will be allocated back to back in the original Stream. Concretely, it is guaranteed that for streams Original, Primary, and Residual, that Original[0-j] = Primary[0-j] and Original[j-n] = Residual[0-m] once the streams have been read to completion. 
This method is guaranteed to be idempotent.", + "canonical": true, + "file": "big_query_storage.split_read_stream.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 63, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "async": true, + "parameters": [ + { + "name": "original_stream", + "type": ".google.cloud.bigquery.storage.v1beta1.Stream" + }, + { + "name": "fraction", + "type": "TYPE_FLOAT" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse", + "client": { + "shortName": "BigQueryStorageClient", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient" + }, + "method": { + "shortName": "SplitReadStream", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage.SplitReadStream", + "service": { + "shortName": "BigQueryStorage", + "fullName": "google.cloud.bigquery.storage.v1beta1.BigQueryStorage" + } + } + } + } + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 55dcdc0ebd7..83366b79cfe 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -403,9 +403,8 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.ReadSession | ReadSession}. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.ReadSession|ReadSession}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_read.create_read_session.js * region_tag:bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async @@ -519,9 +518,8 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.SplitReadStreamResponse | SplitReadStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse|SplitReadStreamResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_read.split_read_stream.js * region_tag:bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async @@ -627,9 +625,8 @@ export class BigQueryReadClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} - * An object stream which emits {@link google.cloud.bigquery.storage.v1.ReadRowsResponse | ReadRowsResponse} on 'data' event. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * An object stream which emits {@link protos.google.cloud.bigquery.storage.v1.ReadRowsResponse|ReadRowsResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_read.read_rows.js * region_tag:bigquerystorage_v1_generated_BigQueryRead_ReadRows_async diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 6420025f874..a2c56ea5ae7 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -377,9 +377,8 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.WriteStream | WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.WriteStream|WriteStream}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_write.create_write_stream.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async @@ -478,9 +477,8 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.WriteStream | WriteStream}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.WriteStream|WriteStream}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_write.get_write_stream.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async @@ -577,9 +575,8 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse | FinalizeWriteStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse|FinalizeWriteStreamResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. 
* @example include:samples/generated/v1/big_query_write.finalize_write_stream.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async @@ -682,9 +679,8 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse | BatchCommitWriteStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse|BatchCommitWriteStreamsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_write.batch_commit_write_streams.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async @@ -793,9 +789,8 @@ export class BigQueryWriteClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1.FlushRowsResponse | FlushRowsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1.FlushRowsResponse|FlushRowsResponse}. 
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1/big_query_write.flush_rows.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async @@ -912,10 +907,9 @@ export class BigQueryWriteClient { * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} * An object stream which is both readable and writable. It accepts objects - * representing {@link google.cloud.bigquery.storage.v1.AppendRowsRequest | AppendRowsRequest} for write() method, and - * will emit objects representing {@link google.cloud.bigquery.storage.v1.AppendRowsResponse | AppendRowsResponse} on 'data' event asynchronously. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming) + * representing {@link protos.google.cloud.bigquery.storage.v1.AppendRowsRequest|AppendRowsRequest} for write() method, and + * will emit objects representing {@link protos.google.cloud.bigquery.storage.v1.AppendRowsResponse|AppendRowsResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } * for more details and examples. 
* @example include:samples/generated/v1/big_query_write.append_rows.js * region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json index 4b7f4b0657b..cf13ab0c739 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client_config.json @@ -49,22 +49,22 @@ }, "GetWriteStream": { "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", "retry_params_name": "default" }, "FinalizeWriteStream": { "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", "retry_params_name": "default" }, "BatchCommitWriteStreams": { "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", "retry_params_name": "default" }, "FlushRows": { "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "retry_codes_name": "deadline_exceeded_resource_exhausted_unavailable", "retry_params_name": "default" } } diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 1a4e3ef0ed5..97a19a7a486 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -397,9 +397,8 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.ReadSession | ReadSession}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta1.ReadSession|ReadSession}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1beta1/big_query_storage.create_read_session.js * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_CreateReadSession_async @@ -502,9 +501,8 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse | BatchCreateReadSessionStreamsResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse|BatchCreateReadSessionStreamsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_BatchCreateReadSessionStreams_async @@ -616,9 +614,8 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.protobuf.Empty | Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.protobuf.Empty|Empty}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. * @example include:samples/generated/v1beta1/big_query_storage.finalize_stream.js * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_FinalizeStream_async @@ -733,9 +730,8 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing {@link google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse | SplitReadStreamResponse}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse|SplitReadStreamResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } * for more details and examples. 
* @example include:samples/generated/v1beta1/big_query_storage.split_read_stream.js * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_SplitReadStream_async @@ -841,9 +837,8 @@ export class BigQueryStorageClient { * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Stream} - * An object stream which emits {@link google.cloud.bigquery.storage.v1beta1.ReadRowsResponse | ReadRowsResponse} on 'data' event. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming) + * An object stream which emits {@link protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse|ReadRowsResponse} on 'data' event. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#server-streaming | documentation } * for more details and examples. * @example include:samples/generated/v1beta1/big_query_storage.read_rows.js * region_tag:bigquerystorage_v1beta1_generated_BigQueryStorage_ReadRows_async diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 3e4fc28a671..f61fe236476 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -30,15 +30,9 @@ describe('📦 pack-n-play test', () => { ts: readFileSync( './system-test/fixtures/sample/src/index.ts' ).toString(), - dependencies: ['typescript@4.8.3'], }, }; - try { - await packNTest(options); - } catch (err) { - console.error('TS install failed:\n', err); - throw err; - } + await packNTest(options); }); it('JavaScript code', async function () { @@ -50,14 +44,8 @@ describe('📦 pack-n-play test', () => { ts: readFileSync( './system-test/fixtures/sample/src/index.js' ).toString(), - dependencies: ['typescript@4.8.3'], }, }; - try { - await packNTest(options); - } catch 
(err) { - console.error('JS install failed:\n', err); - throw err; - } + await packNTest(options); }); }); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index e41d046a36b..c178c3468b6 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -556,7 +556,9 @@ describe('v1.BigQueryReadClient', () => { request.readStream = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); - const stream = client.readRows(request); + const stream = client.readRows(request, { + retryRequestOptions: {noResponseRetries: 0}, + }); const promise = new Promise((resolve, reject) => { stream.on( 'data', diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 2aa76e8ec7e..ecf41159838 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -867,7 +867,9 @@ describe('v1beta1.BigQueryStorageClient', () => { request.readPosition.stream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); client.close(); - const stream = client.readRows(request); + const stream = client.readRows(request, { + retryRequestOptions: {noResponseRetries: 0}, + }); const promise = new Promise((resolve, reject) => { stream.on( 'data', diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json index b4fd9192913..c78f1c884ef 100644 --- a/handwritten/bigquery-storage/tsconfig.json +++ b/handwritten/bigquery-storage/tsconfig.json @@ -7,8 +7,7 @@ "lib": [ "es2018", "dom" - ], - "module": "CommonJS" + ] }, "include": [ "src/*.ts", From 88e9b7a6cca78b288ceb009530f89d21251c1e29 Mon Sep 17 00:00:00 2001 From: Alvaro 
Viebrantz Date: Fri, 18 Aug 2023 14:35:42 -0400 Subject: [PATCH 235/333] fix: struct field names should not be lowercase (#364) Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> --- handwritten/bigquery-storage/src/adapt/proto.ts | 4 ++-- handwritten/bigquery-storage/test/adapt/proto.ts | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 3833a7ffabf..1a1ed992894 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -220,7 +220,7 @@ function convertTableFieldSchemaToFieldDescriptorProto( scope: string, useProto3: boolean ): FieldDescriptorProto { - const name = `${field.name}`.toLowerCase(); + const name = field.name; const type = field.type; if (!type) { throw Error(`table field ${name} missing type`); @@ -241,7 +241,7 @@ function convertTableFieldSchemaToFieldDescriptorProto( throw Error(`table field type ${type} not supported`); } fdp = new FieldDescriptorProto({ - name: field.name, + name: name, number: fNumber, type: pType, label: label, diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 0c5c5ff605a..99fea44edd2 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -80,7 +80,7 @@ describe('Adapt Protos', () => { mode: 'NULLABLE', }, { - name: 'details', + name: 'recordDetails', type: 'STRUCT', mode: 'REPEATED', fields: [ @@ -136,11 +136,11 @@ describe('Adapt Protos', () => { options: {}, }, { - name: 'details', + name: 'recordDetails', number: 2, label: 'LABEL_REPEATED', type: 'TYPE_MESSAGE', - typeName: 'Nested_details', + typeName: 'Nested_recordDetails', }, { name: 'metadata', @@ -152,7 +152,7 @@ describe('Adapt Protos', () => { ], nestedType: [ { - name: 'Nested_details', + name: 'Nested_recordDetails', field: [ { name: 
'key', @@ -194,7 +194,7 @@ describe('Adapt Protos', () => { const NestedProto = (Type as any).fromDescriptor(protoDescriptor); const raw = { record_id: '12345', - details: [ + recordDetails: [ {key: 'name', value: 'jimmy'}, {key: 'title', value: 'clown'}, ], From 742bb75716346925180fd1a57e88f4215f6228d9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 21 Sep 2023 12:12:15 +0200 Subject: [PATCH 236/333] chore(deps): update dependency gapic-tools to ^0.2.0 (#372) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [gapic-tools](https://togithub.com/googleapis/gax-nodejs) | [`^0.1.8` -> `^0.2.0`](https://renovatebot.com/diffs/npm/gapic-tools/0.1.8/0.2.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/gapic-tools/0.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/npm/gapic-tools/0.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/npm/gapic-tools/0.1.8/0.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/gapic-tools/0.1.8/0.2.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/gax-nodejs (gapic-tools) ### [`v0.2.0`](https://togithub.com/googleapis/gax-nodejs/releases/tag/gapic-tools-v0.2.0): gapic-tools: v0.2.0 ##### Features - add ESM tools in gax ([#​1459](https://togithub.com/googleapis/gax-nodejs/issues/1459)) ([0fb1cf9](https://togithub.com/googleapis/gax-nodejs/commit/0fb1cf9acd32dc1ae03a33279eca9449a7d3fca7)) ##### Bug Fixes - **deps:** update dependency google-proto-files to v4 ([#​1490](https://togithub.com/googleapis/gax-nodejs/issues/1490)) ([4748c9f](https://togithub.com/googleapis/gax-nodejs/commit/4748c9fc3a8cfe31e5abb3e35a6ee0d9a6f0e560)) - **deps:** update dependency protobufjs-cli to v1.1.2 ([#​1495](https://togithub.com/googleapis/gax-nodejs/issues/1495)) ([762591e](https://togithub.com/googleapis/gax-nodejs/commit/762591ed28801e5311ab737b04185781a41752e6)) - make gapic-tools depend on gax-nodejs ([#​1480](https://togithub.com/googleapis/gax-nodejs/issues/1480)) ([d0f410d](https://togithub.com/googleapis/gax-nodejs/commit/d0f410d2e08f393f2661c8c92568a0b518fddf99)) - release new version of gapic-tools ([#​1483](https://togithub.com/googleapis/gax-nodejs/issues/1483)) ([e4f5482](https://togithub.com/googleapis/gax-nodejs/commit/e4f548254bfce3daa3b02ae81764bb3394fc4f23))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://developer.mend.io/github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6772cd5b9ba..8eac4f7b0b9 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -36,7 +36,7 @@ "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", "c8": "^8.0.0", - "gapic-tools": "^0.1.8", + "gapic-tools": "^0.2.0", "gts": "^5.0.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^3.0.0", From cb6bb2389d15703b8203a3ca9c4a26bf6ba7507f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 23:18:25 -0700 Subject: [PATCH 237/333] chore: call non-hermetic functions since we're installing node_modules directly from the library (#377) * chore: call non-hermetic functions since we're installing node_modules directly from the library Source-Link: https://github.com/googleapis/synthtool/commit/4c4063f8395130957a0d49fcec810a7d0a76cf7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:8b6a07a38d1583d96b6e251ba208bd4ef0bc2a0cc37471ffc518841651d15bd6 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/sync-repo-settings.yaml | 1 - 
.../bigquery-storage/protos/protos.d.ts | 540 +++- handwritten/bigquery-storage/protos/protos.js | 2215 +++++++++++++++-- .../bigquery-storage/protos/protos.json | 292 ++- 5 files changed, 2857 insertions(+), 195 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index a3d003c65a1..807a8916118 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:e08f9a3757808cdaf7a377e962308c65c4d7eff12db206d4fae702dd50d43430 -# created: 2023-08-03T18:46:14.719706948Z + digest: sha256:8b6a07a38d1583d96b6e251ba208bd4ef0bc2a0cc37471ffc518841651d15bd6 +# created: 2023-09-25T22:18:27.595486267Z diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml index 1350faeff2a..b46e4c4d61d 100644 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -7,7 +7,6 @@ branchProtectionRules: requiredStatusCheckContexts: - "ci/kokoro: Samples test" - "ci/kokoro: System test" - - docs - lint - test (14) - test (16) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 04a3856b455..86514321264 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -7411,6 +7411,15 @@ export namespace google { /** ExtensionRangeOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); + + /** ExtensionRangeOptions declaration */ + declaration?: (google.protobuf.ExtensionRangeOptions.IDeclaration[]|null); + + /** ExtensionRangeOptions features */ + features?: (google.protobuf.IFeatureSet|null); + + 
/** ExtensionRangeOptions verification */ + verification?: (google.protobuf.ExtensionRangeOptions.VerificationState|keyof typeof google.protobuf.ExtensionRangeOptions.VerificationState|null); } /** Represents an ExtensionRangeOptions. */ @@ -7425,6 +7434,15 @@ export namespace google { /** ExtensionRangeOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; + /** ExtensionRangeOptions declaration. */ + public declaration: google.protobuf.ExtensionRangeOptions.IDeclaration[]; + + /** ExtensionRangeOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + + /** ExtensionRangeOptions verification. */ + public verification: (google.protobuf.ExtensionRangeOptions.VerificationState|keyof typeof google.protobuf.ExtensionRangeOptions.VerificationState); + /** * Creates a new ExtensionRangeOptions instance using the specified properties. * @param [properties] Properties to set @@ -7503,6 +7521,136 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace ExtensionRangeOptions { + + /** Properties of a Declaration. */ + interface IDeclaration { + + /** Declaration number */ + number?: (number|null); + + /** Declaration fullName */ + fullName?: (string|null); + + /** Declaration type */ + type?: (string|null); + + /** Declaration reserved */ + reserved?: (boolean|null); + + /** Declaration repeated */ + repeated?: (boolean|null); + } + + /** Represents a Declaration. */ + class Declaration implements IDeclaration { + + /** + * Constructs a new Declaration. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ExtensionRangeOptions.IDeclaration); + + /** Declaration number. */ + public number: number; + + /** Declaration fullName. */ + public fullName: string; + + /** Declaration type. */ + public type: string; + + /** Declaration reserved. */ + public reserved: boolean; + + /** Declaration repeated. 
*/ + public repeated: boolean; + + /** + * Creates a new Declaration instance using the specified properties. + * @param [properties] Properties to set + * @returns Declaration instance + */ + public static create(properties?: google.protobuf.ExtensionRangeOptions.IDeclaration): google.protobuf.ExtensionRangeOptions.Declaration; + + /** + * Encodes the specified Declaration message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.Declaration.verify|verify} messages. + * @param message Declaration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ExtensionRangeOptions.IDeclaration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Declaration message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.Declaration.verify|verify} messages. + * @param message Declaration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ExtensionRangeOptions.IDeclaration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Declaration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Declaration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.ExtensionRangeOptions.Declaration; + + /** + * Decodes a Declaration message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns Declaration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.ExtensionRangeOptions.Declaration; + + /** + * Verifies a Declaration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Declaration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Declaration + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.ExtensionRangeOptions.Declaration; + + /** + * Creates a plain object from a Declaration message. Also converts values to other types if specified. + * @param message Declaration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.ExtensionRangeOptions.Declaration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Declaration to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Declaration + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** VerificationState enum. */ + enum VerificationState { + DECLARATION = 0, + UNVERIFIED = 1 + } + } + /** Properties of a FieldDescriptorProto. 
*/ interface IFieldDescriptorProto { @@ -8430,6 +8578,9 @@ export namespace google { /** FileOptions rubyPackage */ rubyPackage?: (string|null); + /** FileOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** FileOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -8506,6 +8657,9 @@ export namespace google { /** FileOptions rubyPackage. */ public rubyPackage: string; + /** FileOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** FileOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -8615,6 +8769,9 @@ export namespace google { /** MessageOptions deprecatedLegacyJsonFieldConflicts */ deprecatedLegacyJsonFieldConflicts?: (boolean|null); + /** MessageOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** MessageOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -8646,6 +8803,9 @@ export namespace google { /** MessageOptions deprecatedLegacyJsonFieldConflicts. */ public deprecatedLegacyJsonFieldConflicts: boolean; + /** MessageOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** MessageOptions uninterpretedOption. 
*/ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -8757,8 +8917,14 @@ export namespace google { /** FieldOptions retention */ retention?: (google.protobuf.FieldOptions.OptionRetention|keyof typeof google.protobuf.FieldOptions.OptionRetention|null); - /** FieldOptions target */ - target?: (google.protobuf.FieldOptions.OptionTargetType|keyof typeof google.protobuf.FieldOptions.OptionTargetType|null); + /** FieldOptions targets */ + targets?: (google.protobuf.FieldOptions.OptionTargetType[]|null); + + /** FieldOptions editionDefaults */ + editionDefaults?: (google.protobuf.FieldOptions.IEditionDefault[]|null); + + /** FieldOptions features */ + features?: (google.protobuf.IFeatureSet|null); /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -8809,8 +8975,14 @@ export namespace google { /** FieldOptions retention. */ public retention: (google.protobuf.FieldOptions.OptionRetention|keyof typeof google.protobuf.FieldOptions.OptionRetention); - /** FieldOptions target. */ - public target: (google.protobuf.FieldOptions.OptionTargetType|keyof typeof google.protobuf.FieldOptions.OptionTargetType); + /** FieldOptions targets. */ + public targets: google.protobuf.FieldOptions.OptionTargetType[]; + + /** FieldOptions editionDefaults. */ + public editionDefaults: google.protobuf.FieldOptions.IEditionDefault[]; + + /** FieldOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -8929,11 +9101,117 @@ export namespace google { TARGET_TYPE_SERVICE = 8, TARGET_TYPE_METHOD = 9 } + + /** Properties of an EditionDefault. */ + interface IEditionDefault { + + /** EditionDefault edition */ + edition?: (string|null); + + /** EditionDefault value */ + value?: (string|null); + } + + /** Represents an EditionDefault. 
*/ + class EditionDefault implements IEditionDefault { + + /** + * Constructs a new EditionDefault. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FieldOptions.IEditionDefault); + + /** EditionDefault edition. */ + public edition: string; + + /** EditionDefault value. */ + public value: string; + + /** + * Creates a new EditionDefault instance using the specified properties. + * @param [properties] Properties to set + * @returns EditionDefault instance + */ + public static create(properties?: google.protobuf.FieldOptions.IEditionDefault): google.protobuf.FieldOptions.EditionDefault; + + /** + * Encodes the specified EditionDefault message. Does not implicitly {@link google.protobuf.FieldOptions.EditionDefault.verify|verify} messages. + * @param message EditionDefault message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FieldOptions.IEditionDefault, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EditionDefault message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.EditionDefault.verify|verify} messages. + * @param message EditionDefault message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FieldOptions.IEditionDefault, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EditionDefault message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.EditionDefault; + + /** + * Decodes an EditionDefault message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.EditionDefault; + + /** + * Verifies an EditionDefault message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EditionDefault message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EditionDefault + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.EditionDefault; + + /** + * Creates a plain object from an EditionDefault message. Also converts values to other types if specified. + * @param message EditionDefault + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions.EditionDefault, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EditionDefault to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EditionDefault + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } /** Properties of an OneofOptions. */ interface IOneofOptions { + /** OneofOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** OneofOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -8947,6 +9225,9 @@ export namespace google { */ constructor(properties?: google.protobuf.IOneofOptions); + /** OneofOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** OneofOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -9040,6 +9321,9 @@ export namespace google { /** EnumOptions deprecatedLegacyJsonFieldConflicts */ deprecatedLegacyJsonFieldConflicts?: (boolean|null); + /** EnumOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** EnumOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -9062,6 +9346,9 @@ export namespace google { /** EnumOptions deprecatedLegacyJsonFieldConflicts. */ public deprecatedLegacyJsonFieldConflicts: boolean; + /** EnumOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** EnumOptions uninterpretedOption. 
*/ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -9149,6 +9436,12 @@ export namespace google { /** EnumValueOptions deprecated */ deprecated?: (boolean|null); + /** EnumValueOptions features */ + features?: (google.protobuf.IFeatureSet|null); + + /** EnumValueOptions debugRedact */ + debugRedact?: (boolean|null); + /** EnumValueOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -9165,6 +9458,12 @@ export namespace google { /** EnumValueOptions deprecated. */ public deprecated: boolean; + /** EnumValueOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + + /** EnumValueOptions debugRedact. */ + public debugRedact: boolean; + /** EnumValueOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -9249,6 +9548,9 @@ export namespace google { /** Properties of a ServiceOptions. */ interface IServiceOptions { + /** ServiceOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** ServiceOptions deprecated */ deprecated?: (boolean|null); @@ -9271,6 +9573,9 @@ export namespace google { */ constructor(properties?: google.protobuf.IServiceOptions); + /** ServiceOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** ServiceOptions deprecated. */ public deprecated: boolean; @@ -9364,6 +9669,9 @@ export namespace google { /** MethodOptions idempotencyLevel */ idempotencyLevel?: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel|null); + /** MethodOptions features */ + features?: (google.protobuf.IFeatureSet|null); + /** MethodOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -9389,6 +9697,9 @@ export namespace google { /** MethodOptions idempotencyLevel. 
*/ public idempotencyLevel: (google.protobuf.MethodOptions.IdempotencyLevel|keyof typeof google.protobuf.MethodOptions.IdempotencyLevel); + /** MethodOptions features. */ + public features?: (google.protobuf.IFeatureSet|null); + /** MethodOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -9719,6 +10030,186 @@ export namespace google { } } + /** Properties of a FeatureSet. */ + interface IFeatureSet { + + /** FeatureSet fieldPresence */ + fieldPresence?: (google.protobuf.FeatureSet.FieldPresence|keyof typeof google.protobuf.FeatureSet.FieldPresence|null); + + /** FeatureSet enumType */ + enumType?: (google.protobuf.FeatureSet.EnumType|keyof typeof google.protobuf.FeatureSet.EnumType|null); + + /** FeatureSet repeatedFieldEncoding */ + repeatedFieldEncoding?: (google.protobuf.FeatureSet.RepeatedFieldEncoding|keyof typeof google.protobuf.FeatureSet.RepeatedFieldEncoding|null); + + /** FeatureSet stringFieldValidation */ + stringFieldValidation?: (google.protobuf.FeatureSet.StringFieldValidation|keyof typeof google.protobuf.FeatureSet.StringFieldValidation|null); + + /** FeatureSet messageEncoding */ + messageEncoding?: (google.protobuf.FeatureSet.MessageEncoding|keyof typeof google.protobuf.FeatureSet.MessageEncoding|null); + + /** FeatureSet jsonFormat */ + jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); + + /** FeatureSet rawFeatures */ + rawFeatures?: (google.protobuf.IFeatureSet|null); + } + + /** Represents a FeatureSet. */ + class FeatureSet implements IFeatureSet { + + /** + * Constructs a new FeatureSet. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFeatureSet); + + /** FeatureSet fieldPresence. */ + public fieldPresence: (google.protobuf.FeatureSet.FieldPresence|keyof typeof google.protobuf.FeatureSet.FieldPresence); + + /** FeatureSet enumType. 
*/ + public enumType: (google.protobuf.FeatureSet.EnumType|keyof typeof google.protobuf.FeatureSet.EnumType); + + /** FeatureSet repeatedFieldEncoding. */ + public repeatedFieldEncoding: (google.protobuf.FeatureSet.RepeatedFieldEncoding|keyof typeof google.protobuf.FeatureSet.RepeatedFieldEncoding); + + /** FeatureSet stringFieldValidation. */ + public stringFieldValidation: (google.protobuf.FeatureSet.StringFieldValidation|keyof typeof google.protobuf.FeatureSet.StringFieldValidation); + + /** FeatureSet messageEncoding. */ + public messageEncoding: (google.protobuf.FeatureSet.MessageEncoding|keyof typeof google.protobuf.FeatureSet.MessageEncoding); + + /** FeatureSet jsonFormat. */ + public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); + + /** FeatureSet rawFeatures. */ + public rawFeatures?: (google.protobuf.IFeatureSet|null); + + /** + * Creates a new FeatureSet instance using the specified properties. + * @param [properties] Properties to set + * @returns FeatureSet instance + */ + public static create(properties?: google.protobuf.IFeatureSet): google.protobuf.FeatureSet; + + /** + * Encodes the specified FeatureSet message. Does not implicitly {@link google.protobuf.FeatureSet.verify|verify} messages. + * @param message FeatureSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFeatureSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSet message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.verify|verify} messages. + * @param message FeatureSet message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFeatureSet, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSet message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet; + + /** + * Decodes a FeatureSet message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FeatureSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet; + + /** + * Verifies a FeatureSet message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSet message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSet + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet; + + /** + * Creates a plain object from a FeatureSet message. Also converts values to other types if specified. + * @param message FeatureSet + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSet, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSet to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSet + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace FeatureSet { + + /** FieldPresence enum. */ + enum FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0, + EXPLICIT = 1, + IMPLICIT = 2, + LEGACY_REQUIRED = 3 + } + + /** EnumType enum. */ + enum EnumType { + ENUM_TYPE_UNKNOWN = 0, + OPEN = 1, + CLOSED = 2 + } + + /** RepeatedFieldEncoding enum. */ + enum RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0, + PACKED = 1, + EXPANDED = 2 + } + + /** StringFieldValidation enum. */ + enum StringFieldValidation { + STRING_FIELD_VALIDATION_UNKNOWN = 0, + MANDATORY = 1, + HINT = 2, + NONE = 3 + } + + /** MessageEncoding enum. */ + enum MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0, + LENGTH_PREFIXED = 1, + DELIMITED = 2 + } + + /** JsonFormat enum. */ + enum JsonFormat { + JSON_FORMAT_UNKNOWN = 0, + ALLOW = 1, + LEGACY_BEST_EFFORT = 2 + } + } + /** Properties of a SourceCodeInfo. */ interface ISourceCodeInfo { @@ -12097,6 +12588,9 @@ export namespace google { /** Publishing librarySettings */ librarySettings?: (google.api.IClientLibrarySettings[]|null); + + /** Publishing protoReferenceDocumentationUri */ + protoReferenceDocumentationUri?: (string|null); } /** Represents a Publishing. */ @@ -12135,6 +12629,9 @@ export namespace google { /** Publishing librarySettings. */ public librarySettings: google.api.IClientLibrarySettings[]; + /** Publishing protoReferenceDocumentationUri. */ + public protoReferenceDocumentationUri: string; + /** * Creates a new Publishing instance using the specified properties. 
* @param [properties] Properties to set @@ -12715,6 +13212,21 @@ export namespace google { /** DotnetSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** DotnetSettings renamedServices */ + renamedServices?: ({ [k: string]: string }|null); + + /** DotnetSettings renamedResources */ + renamedResources?: ({ [k: string]: string }|null); + + /** DotnetSettings ignoredResources */ + ignoredResources?: (string[]|null); + + /** DotnetSettings forcedNamespaceAliases */ + forcedNamespaceAliases?: (string[]|null); + + /** DotnetSettings handwrittenSignatures */ + handwrittenSignatures?: (string[]|null); } /** Represents a DotnetSettings. */ @@ -12729,6 +13241,21 @@ export namespace google { /** DotnetSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** DotnetSettings renamedServices. */ + public renamedServices: { [k: string]: string }; + + /** DotnetSettings renamedResources. */ + public renamedResources: { [k: string]: string }; + + /** DotnetSettings ignoredResources. */ + public ignoredResources: string[]; + + /** DotnetSettings forcedNamespaceAliases. */ + public forcedNamespaceAliases: string[]; + + /** DotnetSettings handwrittenSignatures. */ + public handwrittenSignatures: string[]; + /** * Creates a new DotnetSettings instance using the specified properties. * @param [properties] Properties to set @@ -13228,7 +13755,10 @@ export namespace google { CLOUD = 1, ADS = 2, PHOTOS = 3, - STREET_VIEW = 4 + STREET_VIEW = 4, + SHOPPING = 5, + GEO = 6, + GENERATIVE_AI = 7 } /** ClientLibraryDestination enum. 
*/ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 4017976d4fc..71a78d7ed87 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -18407,6 +18407,9 @@ * @memberof google.protobuf * @interface IExtensionRangeOptions * @property {Array.|null} [uninterpretedOption] ExtensionRangeOptions uninterpretedOption + * @property {Array.|null} [declaration] ExtensionRangeOptions declaration + * @property {google.protobuf.IFeatureSet|null} [features] ExtensionRangeOptions features + * @property {google.protobuf.ExtensionRangeOptions.VerificationState|null} [verification] ExtensionRangeOptions verification */ /** @@ -18419,6 +18422,7 @@ */ function ExtensionRangeOptions(properties) { this.uninterpretedOption = []; + this.declaration = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -18433,6 +18437,30 @@ */ ExtensionRangeOptions.prototype.uninterpretedOption = $util.emptyArray; + /** + * ExtensionRangeOptions declaration. + * @member {Array.} declaration + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.declaration = $util.emptyArray; + + /** + * ExtensionRangeOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.features = null; + + /** + * ExtensionRangeOptions verification. + * @member {google.protobuf.ExtensionRangeOptions.VerificationState} verification + * @memberof google.protobuf.ExtensionRangeOptions + * @instance + */ + ExtensionRangeOptions.prototype.verification = 1; + /** * Creates a new ExtensionRangeOptions instance using the specified properties. 
* @function create @@ -18457,6 +18485,13 @@ ExtensionRangeOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); + if (message.declaration != null && message.declaration.length) + for (var i = 0; i < message.declaration.length; ++i) + $root.google.protobuf.ExtensionRangeOptions.Declaration.encode(message.declaration[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.verification != null && Object.hasOwnProperty.call(message, "verification")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.verification); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 50, wireType 2 =*/402).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -18500,6 +18535,20 @@ message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; } + case 2: { + if (!(message.declaration && message.declaration.length)) + message.declaration = []; + message.declaration.push($root.google.protobuf.ExtensionRangeOptions.Declaration.decode(reader, reader.uint32())); + break; + } + case 50: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 3: { + message.verification = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -18544,6 +18593,28 @@ return "uninterpretedOption." 
+ error; } } + if (message.declaration != null && message.hasOwnProperty("declaration")) { + if (!Array.isArray(message.declaration)) + return "declaration: array expected"; + for (var i = 0; i < message.declaration.length; ++i) { + var error = $root.google.protobuf.ExtensionRangeOptions.Declaration.verify(message.declaration[i]); + if (error) + return "declaration." + error; + } + } + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." + error; + } + if (message.verification != null && message.hasOwnProperty("verification")) + switch (message.verification) { + default: + return "verification: enum value expected"; + case 0: + case 1: + break; + } return null; }; @@ -18569,6 +18640,37 @@ message.uninterpretedOption[i] = $root.google.protobuf.UninterpretedOption.fromObject(object.uninterpretedOption[i]); } } + if (object.declaration) { + if (!Array.isArray(object.declaration)) + throw TypeError(".google.protobuf.ExtensionRangeOptions.declaration: array expected"); + message.declaration = []; + for (var i = 0; i < object.declaration.length; ++i) { + if (typeof object.declaration[i] !== "object") + throw TypeError(".google.protobuf.ExtensionRangeOptions.declaration: object expected"); + message.declaration[i] = $root.google.protobuf.ExtensionRangeOptions.Declaration.fromObject(object.declaration[i]); + } + } + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.ExtensionRangeOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } + switch (object.verification) { + case "DECLARATION": + case 0: + message.verification = 0; + break; + default: + if (typeof object.verification === "number") { + message.verification = object.verification; + break; + } + break; + case "UNVERIFIED": + case 1: + message.verification = 1; + break; 
+ } return message; }; @@ -18585,8 +18687,23 @@ if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) + if (options.arrays || options.defaults) { + object.declaration = []; object.uninterpretedOption = []; + } + if (options.defaults) { + object.verification = options.enums === String ? "UNVERIFIED" : 1; + object.features = null; + } + if (message.declaration && message.declaration.length) { + object.declaration = []; + for (var j = 0; j < message.declaration.length; ++j) + object.declaration[j] = $root.google.protobuf.ExtensionRangeOptions.Declaration.toObject(message.declaration[j], options); + } + if (message.verification != null && message.hasOwnProperty("verification")) + object.verification = options.enums === String ? $root.google.protobuf.ExtensionRangeOptions.VerificationState[message.verification] === undefined ? message.verification : $root.google.protobuf.ExtensionRangeOptions.VerificationState[message.verification] : message.verification; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -18621,6 +18738,316 @@ return typeUrlPrefix + "/google.protobuf.ExtensionRangeOptions"; }; + ExtensionRangeOptions.Declaration = (function() { + + /** + * Properties of a Declaration. + * @memberof google.protobuf.ExtensionRangeOptions + * @interface IDeclaration + * @property {number|null} [number] Declaration number + * @property {string|null} [fullName] Declaration fullName + * @property {string|null} [type] Declaration type + * @property {boolean|null} [reserved] Declaration reserved + * @property {boolean|null} [repeated] Declaration repeated + */ + + /** + * Constructs a new Declaration. 
+ * @memberof google.protobuf.ExtensionRangeOptions + * @classdesc Represents a Declaration. + * @implements IDeclaration + * @constructor + * @param {google.protobuf.ExtensionRangeOptions.IDeclaration=} [properties] Properties to set + */ + function Declaration(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Declaration number. + * @member {number} number + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + */ + Declaration.prototype.number = 0; + + /** + * Declaration fullName. + * @member {string} fullName + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + */ + Declaration.prototype.fullName = ""; + + /** + * Declaration type. + * @member {string} type + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + */ + Declaration.prototype.type = ""; + + /** + * Declaration reserved. + * @member {boolean} reserved + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + */ + Declaration.prototype.reserved = false; + + /** + * Declaration repeated. + * @member {boolean} repeated + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + */ + Declaration.prototype.repeated = false; + + /** + * Creates a new Declaration instance using the specified properties. + * @function create + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {google.protobuf.ExtensionRangeOptions.IDeclaration=} [properties] Properties to set + * @returns {google.protobuf.ExtensionRangeOptions.Declaration} Declaration instance + */ + Declaration.create = function create(properties) { + return new Declaration(properties); + }; + + /** + * Encodes the specified Declaration message. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.Declaration.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {google.protobuf.ExtensionRangeOptions.IDeclaration} message Declaration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Declaration.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.number != null && Object.hasOwnProperty.call(message, "number")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.number); + if (message.fullName != null && Object.hasOwnProperty.call(message, "fullName")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.fullName); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.type); + if (message.reserved != null && Object.hasOwnProperty.call(message, "reserved")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.reserved); + if (message.repeated != null && Object.hasOwnProperty.call(message, "repeated")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.repeated); + return writer; + }; + + /** + * Encodes the specified Declaration message, length delimited. Does not implicitly {@link google.protobuf.ExtensionRangeOptions.Declaration.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {google.protobuf.ExtensionRangeOptions.IDeclaration} message Declaration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Declaration.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Declaration message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.ExtensionRangeOptions.Declaration} Declaration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Declaration.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions.Declaration(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.number = reader.int32(); + break; + } + case 2: { + message.fullName = reader.string(); + break; + } + case 3: { + message.type = reader.string(); + break; + } + case 5: { + message.reserved = reader.bool(); + break; + } + case 6: { + message.repeated = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Declaration message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.ExtensionRangeOptions.Declaration} Declaration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Declaration.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Declaration message. 
+ * @function verify + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Declaration.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.number != null && message.hasOwnProperty("number")) + if (!$util.isInteger(message.number)) + return "number: integer expected"; + if (message.fullName != null && message.hasOwnProperty("fullName")) + if (!$util.isString(message.fullName)) + return "fullName: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + if (message.reserved != null && message.hasOwnProperty("reserved")) + if (typeof message.reserved !== "boolean") + return "reserved: boolean expected"; + if (message.repeated != null && message.hasOwnProperty("repeated")) + if (typeof message.repeated !== "boolean") + return "repeated: boolean expected"; + return null; + }; + + /** + * Creates a Declaration message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.ExtensionRangeOptions.Declaration} Declaration + */ + Declaration.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.ExtensionRangeOptions.Declaration) + return object; + var message = new $root.google.protobuf.ExtensionRangeOptions.Declaration(); + if (object.number != null) + message.number = object.number | 0; + if (object.fullName != null) + message.fullName = String(object.fullName); + if (object.type != null) + message.type = String(object.type); + if (object.reserved != null) + message.reserved = Boolean(object.reserved); + if (object.repeated != null) + message.repeated = Boolean(object.repeated); + return message; + }; + + /** + * Creates a plain object from a Declaration message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {google.protobuf.ExtensionRangeOptions.Declaration} message Declaration + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Declaration.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.number = 0; + object.fullName = ""; + object.type = ""; + object.reserved = false; + object.repeated = false; + } + if (message.number != null && message.hasOwnProperty("number")) + object.number = message.number; + if (message.fullName != null && message.hasOwnProperty("fullName")) + object.fullName = message.fullName; + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + if (message.reserved != null && message.hasOwnProperty("reserved")) + object.reserved = message.reserved; + if (message.repeated != null && message.hasOwnProperty("repeated")) + 
object.repeated = message.repeated; + return object; + }; + + /** + * Converts this Declaration to JSON. + * @function toJSON + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @instance + * @returns {Object.} JSON object + */ + Declaration.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Declaration + * @function getTypeUrl + * @memberof google.protobuf.ExtensionRangeOptions.Declaration + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Declaration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.ExtensionRangeOptions.Declaration"; + }; + + return Declaration; + })(); + + /** + * VerificationState enum. + * @name google.protobuf.ExtensionRangeOptions.VerificationState + * @enum {number} + * @property {number} DECLARATION=0 DECLARATION value + * @property {number} UNVERIFIED=1 UNVERIFIED value + */ + ExtensionRangeOptions.VerificationState = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DECLARATION"] = 0; + values[valuesById[1] = "UNVERIFIED"] = 1; + return values; + })(); + return ExtensionRangeOptions; })(); @@ -20950,6 +21377,7 @@ * @property {string|null} [phpNamespace] FileOptions phpNamespace * @property {string|null} [phpMetadataNamespace] FileOptions phpMetadataNamespace * @property {string|null} [rubyPackage] FileOptions rubyPackage + * @property {google.protobuf.IFeatureSet|null} [features] FileOptions features * @property {Array.|null} [uninterpretedOption] FileOptions uninterpretedOption * @property {Array.|null} [".google.api.resourceDefinition"] FileOptions .google.api.resourceDefinition */ @@ -21131,6 +21559,14 @@ */ 
FileOptions.prototype.rubyPackage = ""; + /** + * FileOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.FileOptions + * @instance + */ + FileOptions.prototype.features = null; + /** * FileOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -21211,6 +21647,8 @@ writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 50, wireType 2 =*/402).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -21331,6 +21769,10 @@ message.rubyPackage = reader.string(); break; } + case 50: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -21444,6 +21886,11 @@ if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) if (!$util.isString(message.rubyPackage)) return "rubyPackage: string expected"; + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -21535,6 +21982,11 @@ message.phpMetadataNamespace = String(object.phpMetadataNamespace); if (object.rubyPackage != null) message.rubyPackage = String(object.rubyPackage); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.FileOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FileOptions.uninterpretedOption: array expected"); @@ -21596,6 +22048,7 @@ object.phpGenericServices = false; object.phpMetadataNamespace = ""; object.rubyPackage = ""; + object.features = null; } if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) object.javaPackage = message.javaPackage; @@ -21637,6 +22090,8 @@ object.phpMetadataNamespace = message.phpMetadataNamespace; if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) object.rubyPackage = message.rubyPackage; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -21706,6 +22161,7 @@ * @property {boolean|null} [deprecated] MessageOptions deprecated * @property {boolean|null} [mapEntry] MessageOptions mapEntry * @property {boolean|null} [deprecatedLegacyJsonFieldConflicts] MessageOptions deprecatedLegacyJsonFieldConflicts + * @property {google.protobuf.IFeatureSet|null} [features] MessageOptions features * @property {Array.|null} [uninterpretedOption] MessageOptions 
uninterpretedOption * @property {google.api.IResourceDescriptor|null} [".google.api.resource"] MessageOptions .google.api.resource */ @@ -21766,6 +22222,14 @@ */ MessageOptions.prototype.deprecatedLegacyJsonFieldConflicts = false; + /** + * MessageOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.MessageOptions + * @instance + */ + MessageOptions.prototype.features = null; + /** * MessageOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -21816,6 +22280,8 @@ writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); if (message.deprecatedLegacyJsonFieldConflicts != null && Object.hasOwnProperty.call(message, "deprecatedLegacyJsonFieldConflicts")) writer.uint32(/* id 11, wireType 0 =*/88).bool(message.deprecatedLegacyJsonFieldConflicts); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -21875,6 +22341,10 @@ message.deprecatedLegacyJsonFieldConflicts = reader.bool(); break; } + case 12: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -21935,6 +22405,11 @@ if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) if (typeof message.deprecatedLegacyJsonFieldConflicts !== "boolean") return "deprecatedLegacyJsonFieldConflicts: boolean expected"; + if (message.features != null && message.hasOwnProperty("features")) { 
+ var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." + error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -21974,6 +22449,11 @@ message.mapEntry = Boolean(object.mapEntry); if (object.deprecatedLegacyJsonFieldConflicts != null) message.deprecatedLegacyJsonFieldConflicts = Boolean(object.deprecatedLegacyJsonFieldConflicts); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.MessageOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.MessageOptions.uninterpretedOption: array expected"); @@ -22013,6 +22493,7 @@ object.deprecated = false; object.mapEntry = false; object.deprecatedLegacyJsonFieldConflicts = false; + object.features = null; object[".google.api.resource"] = null; } if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) @@ -22025,6 +22506,8 @@ object.mapEntry = message.mapEntry; if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) object.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -22079,7 +22562,9 @@ * @property {boolean|null} [weak] FieldOptions weak * @property {boolean|null} [debugRedact] FieldOptions debugRedact * @property 
{google.protobuf.FieldOptions.OptionRetention|null} [retention] FieldOptions retention - * @property {google.protobuf.FieldOptions.OptionTargetType|null} [target] FieldOptions target + * @property {Array.|null} [targets] FieldOptions targets + * @property {Array.|null} [editionDefaults] FieldOptions editionDefaults + * @property {google.protobuf.IFeatureSet|null} [features] FieldOptions features * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior @@ -22095,6 +22580,8 @@ * @param {google.protobuf.IFieldOptions=} [properties] Properties to set */ function FieldOptions(properties) { + this.targets = []; + this.editionDefaults = []; this.uninterpretedOption = []; this[".google.api.fieldBehavior"] = []; if (properties) @@ -22176,12 +22663,28 @@ FieldOptions.prototype.retention = 0; /** - * FieldOptions target. - * @member {google.protobuf.FieldOptions.OptionTargetType} target + * FieldOptions targets. + * @member {Array.} targets + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.targets = $util.emptyArray; + + /** + * FieldOptions editionDefaults. + * @member {Array.} editionDefaults + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.editionDefaults = $util.emptyArray; + + /** + * FieldOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features * @memberof google.protobuf.FieldOptions * @instance */ - FieldOptions.prototype.target = 0; + FieldOptions.prototype.features = null; /** * FieldOptions uninterpretedOption. 
@@ -22257,8 +22760,14 @@ writer.uint32(/* id 16, wireType 0 =*/128).bool(message.debugRedact); if (message.retention != null && Object.hasOwnProperty.call(message, "retention")) writer.uint32(/* id 17, wireType 0 =*/136).int32(message.retention); - if (message.target != null && Object.hasOwnProperty.call(message, "target")) - writer.uint32(/* id 18, wireType 0 =*/144).int32(message.target); + if (message.targets != null && message.targets.length) + for (var i = 0; i < message.targets.length; ++i) + writer.uint32(/* id 19, wireType 0 =*/152).int32(message.targets[i]); + if (message.editionDefaults != null && message.editionDefaults.length) + for (var i = 0; i < message.editionDefaults.length; ++i) + $root.google.protobuf.FieldOptions.EditionDefault.encode(message.editionDefaults[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim(); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -22342,12 +22851,29 @@ message.retention = reader.int32(); break; } - case 18: { - message.target = reader.int32(); + case 19: { + if (!(message.targets && message.targets.length)) + message.targets = []; + if ((tag & 7) === 2) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) + message.targets.push(reader.int32()); + } else + message.targets.push(reader.int32()); break; } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + case 20: { + if (!(message.editionDefaults && message.editionDefaults.length)) + message.editionDefaults = []; + 
message.editionDefaults.push($root.google.protobuf.FieldOptions.EditionDefault.decode(reader, reader.uint32())); + break; + } + case 21: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; @@ -22451,22 +22977,40 @@ case 2: break; } - if (message.target != null && message.hasOwnProperty("target")) - switch (message.target) { - default: - return "target: enum value expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - break; + if (message.targets != null && message.hasOwnProperty("targets")) { + if (!Array.isArray(message.targets)) + return "targets: array expected"; + for (var i = 0; i < message.targets.length; ++i) + switch (message.targets[i]) { + default: + return "targets: enum value[] expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + break; + } + } + if (message.editionDefaults != null && message.hasOwnProperty("editionDefaults")) { + if (!Array.isArray(message.editionDefaults)) + return "editionDefaults: array expected"; + for (var i = 0; i < message.editionDefaults.length; ++i) { + var error = $root.google.protobuf.FieldOptions.EditionDefault.verify(message.editionDefaults[i]); + if (error) + return "editionDefaults." + error; } + } + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -22589,53 +23133,73 @@ message.retention = 2; break; } - switch (object.target) { - default: - if (typeof object.target === "number") { - message.target = object.target; - break; + if (object.targets) { + if (!Array.isArray(object.targets)) + throw TypeError(".google.protobuf.FieldOptions.targets: array expected"); + message.targets = []; + for (var i = 0; i < object.targets.length; ++i) + switch (object.targets[i]) { + default: + if (typeof object.targets[i] === "number") { + message.targets[i] = object.targets[i]; + break; + } + case "TARGET_TYPE_UNKNOWN": + case 0: + message.targets[i] = 0; + break; + case "TARGET_TYPE_FILE": + case 1: + message.targets[i] = 1; + break; + case "TARGET_TYPE_EXTENSION_RANGE": + case 2: + message.targets[i] = 2; + break; + case "TARGET_TYPE_MESSAGE": + case 3: + message.targets[i] = 3; + break; + case "TARGET_TYPE_FIELD": + case 4: + message.targets[i] = 4; + break; + case "TARGET_TYPE_ONEOF": + case 5: + message.targets[i] = 5; + break; + case "TARGET_TYPE_ENUM": + case 6: + message.targets[i] = 6; + break; + case "TARGET_TYPE_ENUM_ENTRY": + case 7: + message.targets[i] = 7; + break; + case "TARGET_TYPE_SERVICE": + case 8: + message.targets[i] = 8; + break; + case "TARGET_TYPE_METHOD": + case 9: + message.targets[i] = 9; + break; + } + } + if (object.editionDefaults) { + if (!Array.isArray(object.editionDefaults)) + throw TypeError(".google.protobuf.FieldOptions.editionDefaults: array expected"); + message.editionDefaults = []; + for (var i = 0; i < object.editionDefaults.length; ++i) { + if (typeof object.editionDefaults[i] !== "object") + throw TypeError(".google.protobuf.FieldOptions.editionDefaults: object expected"); + message.editionDefaults[i] = 
$root.google.protobuf.FieldOptions.EditionDefault.fromObject(object.editionDefaults[i]); } - break; - case "TARGET_TYPE_UNKNOWN": - case 0: - message.target = 0; - break; - case "TARGET_TYPE_FILE": - case 1: - message.target = 1; - break; - case "TARGET_TYPE_EXTENSION_RANGE": - case 2: - message.target = 2; - break; - case "TARGET_TYPE_MESSAGE": - case 3: - message.target = 3; - break; - case "TARGET_TYPE_FIELD": - case 4: - message.target = 4; - break; - case "TARGET_TYPE_ONEOF": - case 5: - message.target = 5; - break; - case "TARGET_TYPE_ENUM": - case 6: - message.target = 6; - break; - case "TARGET_TYPE_ENUM_ENTRY": - case 7: - message.target = 7; - break; - case "TARGET_TYPE_SERVICE": - case 8: - message.target = 8; - break; - case "TARGET_TYPE_METHOD": - case 9: - message.target = 9; - break; + } + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.FieldOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) @@ -22716,6 +23280,8 @@ options = {}; var object = {}; if (options.arrays || options.defaults) { + object.targets = []; + object.editionDefaults = []; object.uninterpretedOption = []; object[".google.api.fieldBehavior"] = []; } @@ -22729,7 +23295,7 @@ object.unverifiedLazy = false; object.debugRedact = false; object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; - object.target = options.enums === String ? "TARGET_TYPE_UNKNOWN" : 0; + object.features = null; object[".google.api.resourceReference"] = null; object[".google.cloud.bigquery.storage.v1.columnName"] = null; } @@ -22751,8 +23317,18 @@ object.debugRedact = message.debugRedact; if (message.retention != null && message.hasOwnProperty("retention")) object.retention = options.enums === String ? $root.google.protobuf.FieldOptions.OptionRetention[message.retention] === undefined ? 
message.retention : $root.google.protobuf.FieldOptions.OptionRetention[message.retention] : message.retention; - if (message.target != null && message.hasOwnProperty("target")) - object.target = options.enums === String ? $root.google.protobuf.FieldOptions.OptionTargetType[message.target] === undefined ? message.target : $root.google.protobuf.FieldOptions.OptionTargetType[message.target] : message.target; + if (message.targets && message.targets.length) { + object.targets = []; + for (var j = 0; j < message.targets.length; ++j) + object.targets[j] = options.enums === String ? $root.google.protobuf.FieldOptions.OptionTargetType[message.targets[j]] === undefined ? message.targets[j] : $root.google.protobuf.FieldOptions.OptionTargetType[message.targets[j]] : message.targets[j]; + } + if (message.editionDefaults && message.editionDefaults.length) { + object.editionDefaults = []; + for (var j = 0; j < message.editionDefaults.length; ++j) + object.editionDefaults[j] = $root.google.protobuf.FieldOptions.EditionDefault.toObject(message.editionDefaults[j], options); + } + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -22874,6 +23450,233 @@ return values; })(); + FieldOptions.EditionDefault = (function() { + + /** + * Properties of an EditionDefault. + * @memberof google.protobuf.FieldOptions + * @interface IEditionDefault + * @property {string|null} [edition] EditionDefault edition + * @property {string|null} [value] EditionDefault value + */ + + /** + * Constructs a new EditionDefault. + * @memberof google.protobuf.FieldOptions + * @classdesc Represents an EditionDefault. 
+ * @implements IEditionDefault + * @constructor + * @param {google.protobuf.FieldOptions.IEditionDefault=} [properties] Properties to set + */ + function EditionDefault(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EditionDefault edition. + * @member {string} edition + * @memberof google.protobuf.FieldOptions.EditionDefault + * @instance + */ + EditionDefault.prototype.edition = ""; + + /** + * EditionDefault value. + * @member {string} value + * @memberof google.protobuf.FieldOptions.EditionDefault + * @instance + */ + EditionDefault.prototype.value = ""; + + /** + * Creates a new EditionDefault instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {google.protobuf.FieldOptions.IEditionDefault=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions.EditionDefault} EditionDefault instance + */ + EditionDefault.create = function create(properties) { + return new EditionDefault(properties); + }; + + /** + * Encodes the specified EditionDefault message. Does not implicitly {@link google.protobuf.FieldOptions.EditionDefault.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {google.protobuf.FieldOptions.IEditionDefault} message EditionDefault message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EditionDefault.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.edition); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.value); + return writer; + }; + + /** + * Encodes the specified EditionDefault message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.EditionDefault.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {google.protobuf.FieldOptions.IEditionDefault} message EditionDefault message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EditionDefault.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EditionDefault message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions.EditionDefault} EditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EditionDefault.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.EditionDefault(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.edition = reader.string(); + break; + } + case 2: { + message.value = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EditionDefault message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions.EditionDefault} EditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EditionDefault.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EditionDefault message. 
+ * @function verify + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EditionDefault.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.edition != null && message.hasOwnProperty("edition")) + if (!$util.isString(message.edition)) + return "edition: string expected"; + if (message.value != null && message.hasOwnProperty("value")) + if (!$util.isString(message.value)) + return "value: string expected"; + return null; + }; + + /** + * Creates an EditionDefault message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions.EditionDefault} EditionDefault + */ + EditionDefault.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions.EditionDefault) + return object; + var message = new $root.google.protobuf.FieldOptions.EditionDefault(); + if (object.edition != null) + message.edition = String(object.edition); + if (object.value != null) + message.value = String(object.value); + return message; + }; + + /** + * Creates a plain object from an EditionDefault message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {google.protobuf.FieldOptions.EditionDefault} message EditionDefault + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EditionDefault.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.edition = ""; + object.value = ""; + } + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = message.edition; + if (message.value != null && message.hasOwnProperty("value")) + object.value = message.value; + return object; + }; + + /** + * Converts this EditionDefault to JSON. + * @function toJSON + * @memberof google.protobuf.FieldOptions.EditionDefault + * @instance + * @returns {Object.} JSON object + */ + EditionDefault.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EditionDefault + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions.EditionDefault + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EditionDefault.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions.EditionDefault"; + }; + + return EditionDefault; + })(); + return FieldOptions; })(); @@ -22883,6 +23686,7 @@ * Properties of an OneofOptions. * @memberof google.protobuf * @interface IOneofOptions + * @property {google.protobuf.IFeatureSet|null} [features] OneofOptions features * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption */ @@ -22902,6 +23706,14 @@ this[keys[i]] = properties[keys[i]]; } + /** + * OneofOptions features. 
+ * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.OneofOptions + * @instance + */ + OneofOptions.prototype.features = null; + /** * OneofOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -22934,6 +23746,8 @@ OneofOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -22971,6 +23785,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { + case 1: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -23012,6 +23830,11 @@ OneofOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -23036,6 +23859,11 @@ if (object instanceof $root.google.protobuf.OneofOptions) return object; var message = new $root.google.protobuf.OneofOptions(); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.OneofOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.OneofOptions.uninterpretedOption: array expected"); @@ -23064,6 +23892,10 @@ var object = {}; if (options.arrays || options.defaults) object.uninterpretedOption = []; + if (options.defaults) + object.features = null; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -23110,6 +23942,7 @@ * @property {boolean|null} [allowAlias] EnumOptions allowAlias * @property {boolean|null} [deprecated] EnumOptions deprecated * @property {boolean|null} [deprecatedLegacyJsonFieldConflicts] EnumOptions deprecatedLegacyJsonFieldConflicts + * @property {google.protobuf.IFeatureSet|null} [features] EnumOptions features * @property {Array.|null} [uninterpretedOption] EnumOptions uninterpretedOption */ @@ -23153,6 +23986,14 @@ */ EnumOptions.prototype.deprecatedLegacyJsonFieldConflicts = false; + /** + * EnumOptions features. 
+ * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.EnumOptions + * @instance + */ + EnumOptions.prototype.features = null; + /** * EnumOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -23191,6 +24032,8 @@ writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); if (message.deprecatedLegacyJsonFieldConflicts != null && Object.hasOwnProperty.call(message, "deprecatedLegacyJsonFieldConflicts")) writer.uint32(/* id 6, wireType 0 =*/48).bool(message.deprecatedLegacyJsonFieldConflicts); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -23240,6 +24083,10 @@ message.deprecatedLegacyJsonFieldConflicts = reader.bool(); break; } + case 7: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -23290,6 +24137,11 @@ if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) if (typeof message.deprecatedLegacyJsonFieldConflicts !== "boolean") return "deprecatedLegacyJsonFieldConflicts: boolean expected"; + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -23320,6 +24172,11 @@ message.deprecated = Boolean(object.deprecated); if (object.deprecatedLegacyJsonFieldConflicts != null) message.deprecatedLegacyJsonFieldConflicts = Boolean(object.deprecatedLegacyJsonFieldConflicts); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.EnumOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumOptions.uninterpretedOption: array expected"); @@ -23352,6 +24209,7 @@ object.allowAlias = false; object.deprecated = false; object.deprecatedLegacyJsonFieldConflicts = false; + object.features = null; } if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) object.allowAlias = message.allowAlias; @@ -23359,6 +24217,8 @@ object.deprecated = message.deprecated; if (message.deprecatedLegacyJsonFieldConflicts != null && message.hasOwnProperty("deprecatedLegacyJsonFieldConflicts")) object.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -23403,6 +24263,8 @@ * @memberof google.protobuf * @interface IEnumValueOptions * @property {boolean|null} [deprecated] EnumValueOptions deprecated + * @property {google.protobuf.IFeatureSet|null} [features] EnumValueOptions features + * @property {boolean|null} [debugRedact] EnumValueOptions 
debugRedact * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption */ @@ -23431,19 +24293,35 @@ EnumValueOptions.prototype.deprecated = false; /** - * EnumValueOptions uninterpretedOption. - * @member {Array.} uninterpretedOption + * EnumValueOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features * @memberof google.protobuf.EnumValueOptions * @instance */ - EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; + EnumValueOptions.prototype.features = null; /** - * Creates a new EnumValueOptions instance using the specified properties. - * @function create + * EnumValueOptions debugRedact. + * @member {boolean} debugRedact * @memberof google.protobuf.EnumValueOptions - * @static - * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set + * @instance + */ + EnumValueOptions.prototype.debugRedact = false; + + /** + * EnumValueOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new EnumValueOptions instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.EnumValueOptions + * @static + * @param {google.protobuf.IEnumValueOptions=} [properties] Properties to set * @returns {google.protobuf.EnumValueOptions} EnumValueOptions instance */ EnumValueOptions.create = function create(properties) { @@ -23464,6 +24342,10 @@ writer = $Writer.create(); if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.debugRedact); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -23505,6 +24387,14 @@ message.deprecated = reader.bool(); break; } + case 2: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 3: { + message.debugRedact = reader.bool(); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -23549,6 +24439,14 @@ if (message.deprecated != null && message.hasOwnProperty("deprecated")) if (typeof message.deprecated !== "boolean") return "deprecated: boolean expected"; + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } + if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) + if (typeof message.debugRedact !== "boolean") + return "debugRedact: boolean expected"; if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -23575,6 +24473,13 @@ var message = new $root.google.protobuf.EnumValueOptions(); if (object.deprecated != null) message.deprecated = Boolean(object.deprecated); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } + if (object.debugRedact != null) + message.debugRedact = Boolean(object.debugRedact); if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); @@ -23603,10 +24508,17 @@ var object = {}; if (options.arrays || options.defaults) object.uninterpretedOption = []; - if (options.defaults) + if (options.defaults) { object.deprecated = false; + object.features = null; + object.debugRedact = false; + } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); + if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) + object.debugRedact = message.debugRedact; if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -23650,6 +24562,7 @@ * Properties of a ServiceOptions. 
* @memberof google.protobuf * @interface IServiceOptions + * @property {google.protobuf.IFeatureSet|null} [features] ServiceOptions features * @property {boolean|null} [deprecated] ServiceOptions deprecated * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost @@ -23672,6 +24585,14 @@ this[keys[i]] = properties[keys[i]]; } + /** + * ServiceOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype.features = null; + /** * ServiceOptions deprecated. * @member {boolean} deprecated @@ -23730,6 +24651,8 @@ writer = $Writer.create(); if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 34, wireType 2 =*/274).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -23771,6 +24694,10 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { + case 34: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 33: { message.deprecated = reader.bool(); break; @@ -23824,6 +24751,11 @@ ServiceOptions.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + 
return "features." + error; + } if (message.deprecated != null && message.hasOwnProperty("deprecated")) if (typeof message.deprecated !== "boolean") return "deprecated: boolean expected"; @@ -23857,6 +24789,11 @@ if (object instanceof $root.google.protobuf.ServiceOptions) return object; var message = new $root.google.protobuf.ServiceOptions(); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.ServiceOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.deprecated != null) message.deprecated = Boolean(object.deprecated); if (object.uninterpretedOption) { @@ -23893,11 +24830,14 @@ object.uninterpretedOption = []; if (options.defaults) { object.deprecated = false; + object.features = null; object[".google.api.defaultHost"] = ""; object[".google.api.oauthScopes"] = ""; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -23947,6 +24887,7 @@ * @interface IMethodOptions * @property {boolean|null} [deprecated] MethodOptions deprecated * @property {google.protobuf.MethodOptions.IdempotencyLevel|null} [idempotencyLevel] MethodOptions idempotencyLevel + * @property {google.protobuf.IFeatureSet|null} [features] MethodOptions features * @property {Array.|null} [uninterpretedOption] MethodOptions uninterpretedOption * @property {google.api.IHttpRule|null} [".google.api.http"] MethodOptions .google.api.http * @property {Array.|null} [".google.api.methodSignature"] MethodOptions .google.api.methodSignature @@ -23985,6 +24926,14 @@ */ 
MethodOptions.prototype.idempotencyLevel = 0; + /** + * MethodOptions features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.MethodOptions + * @instance + */ + MethodOptions.prototype.features = null; + /** * MethodOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -24037,6 +24986,8 @@ writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 35, wireType 2 =*/282).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -24087,6 +25038,10 @@ message.idempotencyLevel = reader.int32(); break; } + case 35: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -24150,6 +25105,11 @@ case 2: break; } + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -24208,6 +25168,11 @@ message.idempotencyLevel = 2; break; } + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.MethodOptions.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.MethodOptions.uninterpretedOption: array expected"); @@ -24253,12 +25218,15 @@ if (options.defaults) { object.deprecated = false; object.idempotencyLevel = options.enums === String ? "IDEMPOTENCY_UNKNOWN" : 0; + object.features = null; object[".google.api.http"] = null; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) object.idempotencyLevel = options.enums === String ? $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] === undefined ? message.idempotencyLevel : $root.google.protobuf.MethodOptions.IdempotencyLevel[message.idempotencyLevel] : message.idempotencyLevel; + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -24838,113 +25806,714 @@ return message; }; - /** - * Decodes a NamePart message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - NamePart.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; + /** + * Decodes a NamePart message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + NamePart.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a NamePart message. + * @function verify + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + NamePart.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (!$util.isString(message.namePart)) + return "namePart: string expected"; + if (typeof message.isExtension !== "boolean") + return "isExtension: boolean expected"; + return null; + }; + + /** + * Creates a NamePart message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart + */ + NamePart.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) + return object; + var message = new $root.google.protobuf.UninterpretedOption.NamePart(); + if (object.namePart != null) + message.namePart = String(object.namePart); + if (object.isExtension != null) + message.isExtension = Boolean(object.isExtension); + return message; + }; + + /** + * Creates a plain object from a NamePart message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + NamePart.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.namePart = ""; + object.isExtension = false; + } + if (message.namePart != null && message.hasOwnProperty("namePart")) + object.namePart = message.namePart; + if (message.isExtension != null && message.hasOwnProperty("isExtension")) + object.isExtension = message.isExtension; + return object; + }; + + /** + * Converts this NamePart to JSON. 
+ * @function toJSON + * @memberof google.protobuf.UninterpretedOption.NamePart + * @instance + * @returns {Object.} JSON object + */ + NamePart.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for NamePart + * @function getTypeUrl + * @memberof google.protobuf.UninterpretedOption.NamePart + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + NamePart.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.UninterpretedOption.NamePart"; + }; + + return NamePart; + })(); + + return UninterpretedOption; + })(); + + protobuf.FeatureSet = (function() { + + /** + * Properties of a FeatureSet. + * @memberof google.protobuf + * @interface IFeatureSet + * @property {google.protobuf.FeatureSet.FieldPresence|null} [fieldPresence] FeatureSet fieldPresence + * @property {google.protobuf.FeatureSet.EnumType|null} [enumType] FeatureSet enumType + * @property {google.protobuf.FeatureSet.RepeatedFieldEncoding|null} [repeatedFieldEncoding] FeatureSet repeatedFieldEncoding + * @property {google.protobuf.FeatureSet.StringFieldValidation|null} [stringFieldValidation] FeatureSet stringFieldValidation + * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding + * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat + * @property {google.protobuf.IFeatureSet|null} [rawFeatures] FeatureSet rawFeatures + */ + + /** + * Constructs a new FeatureSet. + * @memberof google.protobuf + * @classdesc Represents a FeatureSet. 
+ * @implements IFeatureSet + * @constructor + * @param {google.protobuf.IFeatureSet=} [properties] Properties to set + */ + function FeatureSet(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FeatureSet fieldPresence. + * @member {google.protobuf.FeatureSet.FieldPresence} fieldPresence + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.fieldPresence = 0; + + /** + * FeatureSet enumType. + * @member {google.protobuf.FeatureSet.EnumType} enumType + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.enumType = 0; + + /** + * FeatureSet repeatedFieldEncoding. + * @member {google.protobuf.FeatureSet.RepeatedFieldEncoding} repeatedFieldEncoding + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.repeatedFieldEncoding = 0; + + /** + * FeatureSet stringFieldValidation. + * @member {google.protobuf.FeatureSet.StringFieldValidation} stringFieldValidation + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.stringFieldValidation = 0; + + /** + * FeatureSet messageEncoding. + * @member {google.protobuf.FeatureSet.MessageEncoding} messageEncoding + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.messageEncoding = 0; + + /** + * FeatureSet jsonFormat. + * @member {google.protobuf.FeatureSet.JsonFormat} jsonFormat + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.jsonFormat = 0; + + /** + * FeatureSet rawFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} rawFeatures + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.rawFeatures = null; + + /** + * Creates a new FeatureSet instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FeatureSet + * @static + * @param {google.protobuf.IFeatureSet=} [properties] Properties to set + * @returns {google.protobuf.FeatureSet} FeatureSet instance + */ + FeatureSet.create = function create(properties) { + return new FeatureSet(properties); + }; + + /** + * Encodes the specified FeatureSet message. Does not implicitly {@link google.protobuf.FeatureSet.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FeatureSet + * @static + * @param {google.protobuf.IFeatureSet} message FeatureSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSet.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.fieldPresence != null && Object.hasOwnProperty.call(message, "fieldPresence")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.fieldPresence); + if (message.enumType != null && Object.hasOwnProperty.call(message, "enumType")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.enumType); + if (message.repeatedFieldEncoding != null && Object.hasOwnProperty.call(message, "repeatedFieldEncoding")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.repeatedFieldEncoding); + if (message.stringFieldValidation != null && Object.hasOwnProperty.call(message, "stringFieldValidation")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.stringFieldValidation); + if (message.messageEncoding != null && Object.hasOwnProperty.call(message, "messageEncoding")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); + if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); + if (message.rawFeatures != null && Object.hasOwnProperty.call(message, "rawFeatures")) + $root.google.protobuf.FeatureSet.encode(message.rawFeatures, 
writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified FeatureSet message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSet + * @static + * @param {google.protobuf.IFeatureSet} message FeatureSet message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSet.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSet message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FeatureSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSet} FeatureSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSet.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.fieldPresence = reader.int32(); + break; + } + case 2: { + message.enumType = reader.int32(); + break; + } + case 3: { + message.repeatedFieldEncoding = reader.int32(); + break; + } + case 4: { + message.stringFieldValidation = reader.int32(); + break; + } + case 5: { + message.messageEncoding = reader.int32(); + break; + } + case 6: { + message.jsonFormat = reader.int32(); + break; + } + case 999: { + message.rawFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FeatureSet message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FeatureSet + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSet} FeatureSet + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSet.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSet message. 
+ * @function verify + * @memberof google.protobuf.FeatureSet + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSet.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) + switch (message.fieldPresence) { + default: + return "fieldPresence: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.enumType != null && message.hasOwnProperty("enumType")) + switch (message.enumType) { + default: + return "enumType: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.repeatedFieldEncoding != null && message.hasOwnProperty("repeatedFieldEncoding")) + switch (message.repeatedFieldEncoding) { + default: + return "repeatedFieldEncoding: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.stringFieldValidation != null && message.hasOwnProperty("stringFieldValidation")) + switch (message.stringFieldValidation) { + default: + return "stringFieldValidation: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } + if (message.messageEncoding != null && message.hasOwnProperty("messageEncoding")) + switch (message.messageEncoding) { + default: + return "messageEncoding: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) + switch (message.jsonFormat) { + default: + return "jsonFormat: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.rawFeatures != null && message.hasOwnProperty("rawFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.rawFeatures); + if (error) + return "rawFeatures." + error; + } + return null; + }; + + /** + * Creates a FeatureSet message from a plain object. 
Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FeatureSet + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSet} FeatureSet + */ + FeatureSet.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSet) + return object; + var message = new $root.google.protobuf.FeatureSet(); + switch (object.fieldPresence) { + default: + if (typeof object.fieldPresence === "number") { + message.fieldPresence = object.fieldPresence; + break; + } + break; + case "FIELD_PRESENCE_UNKNOWN": + case 0: + message.fieldPresence = 0; + break; + case "EXPLICIT": + case 1: + message.fieldPresence = 1; + break; + case "IMPLICIT": + case 2: + message.fieldPresence = 2; + break; + case "LEGACY_REQUIRED": + case 3: + message.fieldPresence = 3; + break; + } + switch (object.enumType) { + default: + if (typeof object.enumType === "number") { + message.enumType = object.enumType; + break; + } + break; + case "ENUM_TYPE_UNKNOWN": + case 0: + message.enumType = 0; + break; + case "OPEN": + case 1: + message.enumType = 1; + break; + case "CLOSED": + case 2: + message.enumType = 2; + break; + } + switch (object.repeatedFieldEncoding) { + default: + if (typeof object.repeatedFieldEncoding === "number") { + message.repeatedFieldEncoding = object.repeatedFieldEncoding; + break; + } + break; + case "REPEATED_FIELD_ENCODING_UNKNOWN": + case 0: + message.repeatedFieldEncoding = 0; + break; + case "PACKED": + case 1: + message.repeatedFieldEncoding = 1; + break; + case "EXPANDED": + case 2: + message.repeatedFieldEncoding = 2; + break; + } + switch (object.stringFieldValidation) { + default: + if (typeof object.stringFieldValidation === "number") { + message.stringFieldValidation = object.stringFieldValidation; + break; + } + break; + case "STRING_FIELD_VALIDATION_UNKNOWN": + case 0: + message.stringFieldValidation = 0; + break; + case "MANDATORY": + case 1: + 
message.stringFieldValidation = 1; + break; + case "HINT": + case 2: + message.stringFieldValidation = 2; + break; + case "NONE": + case 3: + message.stringFieldValidation = 3; + break; + } + switch (object.messageEncoding) { + default: + if (typeof object.messageEncoding === "number") { + message.messageEncoding = object.messageEncoding; + break; + } + break; + case "MESSAGE_ENCODING_UNKNOWN": + case 0: + message.messageEncoding = 0; + break; + case "LENGTH_PREFIXED": + case 1: + message.messageEncoding = 1; + break; + case "DELIMITED": + case 2: + message.messageEncoding = 2; + break; + } + switch (object.jsonFormat) { + default: + if (typeof object.jsonFormat === "number") { + message.jsonFormat = object.jsonFormat; + break; + } + break; + case "JSON_FORMAT_UNKNOWN": + case 0: + message.jsonFormat = 0; + break; + case "ALLOW": + case 1: + message.jsonFormat = 1; + break; + case "LEGACY_BEST_EFFORT": + case 2: + message.jsonFormat = 2; + break; + } + if (object.rawFeatures != null) { + if (typeof object.rawFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSet.rawFeatures: object expected"); + message.rawFeatures = $root.google.protobuf.FeatureSet.fromObject(object.rawFeatures); + } + return message; + }; + + /** + * Creates a plain object from a FeatureSet message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FeatureSet + * @static + * @param {google.protobuf.FeatureSet} message FeatureSet + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FeatureSet.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.fieldPresence = options.enums === String ? "FIELD_PRESENCE_UNKNOWN" : 0; + object.enumType = options.enums === String ? "ENUM_TYPE_UNKNOWN" : 0; + object.repeatedFieldEncoding = options.enums === String ? 
"REPEATED_FIELD_ENCODING_UNKNOWN" : 0; + object.stringFieldValidation = options.enums === String ? "STRING_FIELD_VALIDATION_UNKNOWN" : 0; + object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; + object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; + object.rawFeatures = null; + } + if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) + object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; + if (message.enumType != null && message.hasOwnProperty("enumType")) + object.enumType = options.enums === String ? $root.google.protobuf.FeatureSet.EnumType[message.enumType] === undefined ? message.enumType : $root.google.protobuf.FeatureSet.EnumType[message.enumType] : message.enumType; + if (message.repeatedFieldEncoding != null && message.hasOwnProperty("repeatedFieldEncoding")) + object.repeatedFieldEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.RepeatedFieldEncoding[message.repeatedFieldEncoding] === undefined ? message.repeatedFieldEncoding : $root.google.protobuf.FeatureSet.RepeatedFieldEncoding[message.repeatedFieldEncoding] : message.repeatedFieldEncoding; + if (message.stringFieldValidation != null && message.hasOwnProperty("stringFieldValidation")) + object.stringFieldValidation = options.enums === String ? $root.google.protobuf.FeatureSet.StringFieldValidation[message.stringFieldValidation] === undefined ? message.stringFieldValidation : $root.google.protobuf.FeatureSet.StringFieldValidation[message.stringFieldValidation] : message.stringFieldValidation; + if (message.messageEncoding != null && message.hasOwnProperty("messageEncoding")) + object.messageEncoding = options.enums === String ? 
$root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; + if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) + object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; + if (message.rawFeatures != null && message.hasOwnProperty("rawFeatures")) + object.rawFeatures = $root.google.protobuf.FeatureSet.toObject(message.rawFeatures, options); + return object; + }; + + /** + * Converts this FeatureSet to JSON. + * @function toJSON + * @memberof google.protobuf.FeatureSet + * @instance + * @returns {Object.} JSON object + */ + FeatureSet.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FeatureSet + * @function getTypeUrl + * @memberof google.protobuf.FeatureSet + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FeatureSet.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSet"; + }; - /** - * Verifies a NamePart message. 
- * @function verify - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - NamePart.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (!$util.isString(message.namePart)) - return "namePart: string expected"; - if (typeof message.isExtension !== "boolean") - return "isExtension: boolean expected"; - return null; - }; + /** + * FieldPresence enum. + * @name google.protobuf.FeatureSet.FieldPresence + * @enum {number} + * @property {number} FIELD_PRESENCE_UNKNOWN=0 FIELD_PRESENCE_UNKNOWN value + * @property {number} EXPLICIT=1 EXPLICIT value + * @property {number} IMPLICIT=2 IMPLICIT value + * @property {number} LEGACY_REQUIRED=3 LEGACY_REQUIRED value + */ + FeatureSet.FieldPresence = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "FIELD_PRESENCE_UNKNOWN"] = 0; + values[valuesById[1] = "EXPLICIT"] = 1; + values[valuesById[2] = "IMPLICIT"] = 2; + values[valuesById[3] = "LEGACY_REQUIRED"] = 3; + return values; + })(); - /** - * Creates a NamePart message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.UninterpretedOption.NamePart} NamePart - */ - NamePart.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.UninterpretedOption.NamePart) - return object; - var message = new $root.google.protobuf.UninterpretedOption.NamePart(); - if (object.namePart != null) - message.namePart = String(object.namePart); - if (object.isExtension != null) - message.isExtension = Boolean(object.isExtension); - return message; - }; + /** + * EnumType enum. 
+ * @name google.protobuf.FeatureSet.EnumType + * @enum {number} + * @property {number} ENUM_TYPE_UNKNOWN=0 ENUM_TYPE_UNKNOWN value + * @property {number} OPEN=1 OPEN value + * @property {number} CLOSED=2 CLOSED value + */ + FeatureSet.EnumType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ENUM_TYPE_UNKNOWN"] = 0; + values[valuesById[1] = "OPEN"] = 1; + values[valuesById[2] = "CLOSED"] = 2; + return values; + })(); - /** - * Creates a plain object from a NamePart message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {google.protobuf.UninterpretedOption.NamePart} message NamePart - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - NamePart.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.namePart = ""; - object.isExtension = false; - } - if (message.namePart != null && message.hasOwnProperty("namePart")) - object.namePart = message.namePart; - if (message.isExtension != null && message.hasOwnProperty("isExtension")) - object.isExtension = message.isExtension; - return object; - }; + /** + * RepeatedFieldEncoding enum. + * @name google.protobuf.FeatureSet.RepeatedFieldEncoding + * @enum {number} + * @property {number} REPEATED_FIELD_ENCODING_UNKNOWN=0 REPEATED_FIELD_ENCODING_UNKNOWN value + * @property {number} PACKED=1 PACKED value + * @property {number} EXPANDED=2 EXPANDED value + */ + FeatureSet.RepeatedFieldEncoding = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "REPEATED_FIELD_ENCODING_UNKNOWN"] = 0; + values[valuesById[1] = "PACKED"] = 1; + values[valuesById[2] = "EXPANDED"] = 2; + return values; + })(); - /** - * Converts this NamePart to JSON. 
- * @function toJSON - * @memberof google.protobuf.UninterpretedOption.NamePart - * @instance - * @returns {Object.} JSON object - */ - NamePart.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * StringFieldValidation enum. + * @name google.protobuf.FeatureSet.StringFieldValidation + * @enum {number} + * @property {number} STRING_FIELD_VALIDATION_UNKNOWN=0 STRING_FIELD_VALIDATION_UNKNOWN value + * @property {number} MANDATORY=1 MANDATORY value + * @property {number} HINT=2 HINT value + * @property {number} NONE=3 NONE value + */ + FeatureSet.StringFieldValidation = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "STRING_FIELD_VALIDATION_UNKNOWN"] = 0; + values[valuesById[1] = "MANDATORY"] = 1; + values[valuesById[2] = "HINT"] = 2; + values[valuesById[3] = "NONE"] = 3; + return values; + })(); - /** - * Gets the default type url for NamePart - * @function getTypeUrl - * @memberof google.protobuf.UninterpretedOption.NamePart - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - NamePart.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.UninterpretedOption.NamePart"; - }; + /** + * MessageEncoding enum. 
+ * @name google.protobuf.FeatureSet.MessageEncoding + * @enum {number} + * @property {number} MESSAGE_ENCODING_UNKNOWN=0 MESSAGE_ENCODING_UNKNOWN value + * @property {number} LENGTH_PREFIXED=1 LENGTH_PREFIXED value + * @property {number} DELIMITED=2 DELIMITED value + */ + FeatureSet.MessageEncoding = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MESSAGE_ENCODING_UNKNOWN"] = 0; + values[valuesById[1] = "LENGTH_PREFIXED"] = 1; + values[valuesById[2] = "DELIMITED"] = 2; + return values; + })(); - return NamePart; + /** + * JsonFormat enum. + * @name google.protobuf.FeatureSet.JsonFormat + * @enum {number} + * @property {number} JSON_FORMAT_UNKNOWN=0 JSON_FORMAT_UNKNOWN value + * @property {number} ALLOW=1 ALLOW value + * @property {number} LEGACY_BEST_EFFORT=2 LEGACY_BEST_EFFORT value + */ + FeatureSet.JsonFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "JSON_FORMAT_UNKNOWN"] = 0; + values[valuesById[1] = "ALLOW"] = 1; + values[valuesById[2] = "LEGACY_BEST_EFFORT"] = 2; + return values; })(); - return UninterpretedOption; + return FeatureSet; })(); protobuf.SourceCodeInfo = (function() { @@ -30663,6 +32232,7 @@ * @property {string|null} [docTagPrefix] Publishing docTagPrefix * @property {google.api.ClientLibraryOrganization|null} [organization] Publishing organization * @property {Array.|null} [librarySettings] Publishing librarySettings + * @property {string|null} [protoReferenceDocumentationUri] Publishing protoReferenceDocumentationUri */ /** @@ -30755,6 +32325,14 @@ */ Publishing.prototype.librarySettings = $util.emptyArray; + /** + * Publishing protoReferenceDocumentationUri. + * @member {string} protoReferenceDocumentationUri + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.protoReferenceDocumentationUri = ""; + /** * Creates a new Publishing instance using the specified properties. 
* @function create @@ -30800,6 +32378,8 @@ if (message.librarySettings != null && message.librarySettings.length) for (var i = 0; i < message.librarySettings.length; ++i) $root.google.api.ClientLibrarySettings.encode(message.librarySettings[i], writer.uint32(/* id 109, wireType 2 =*/874).fork()).ldelim(); + if (message.protoReferenceDocumentationUri != null && Object.hasOwnProperty.call(message, "protoReferenceDocumentationUri")) + writer.uint32(/* id 110, wireType 2 =*/882).string(message.protoReferenceDocumentationUri); return writer; }; @@ -30876,6 +32456,10 @@ message.librarySettings.push($root.google.api.ClientLibrarySettings.decode(reader, reader.uint32())); break; } + case 110: { + message.protoReferenceDocumentationUri = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -30951,6 +32535,9 @@ case 2: case 3: case 4: + case 5: + case 6: + case 7: break; } if (message.librarySettings != null && message.hasOwnProperty("librarySettings")) { @@ -30962,6 +32549,9 @@ return "librarySettings." 
+ error; } } + if (message.protoReferenceDocumentationUri != null && message.hasOwnProperty("protoReferenceDocumentationUri")) + if (!$util.isString(message.protoReferenceDocumentationUri)) + return "protoReferenceDocumentationUri: string expected"; return null; }; @@ -31031,6 +32621,18 @@ case 4: message.organization = 4; break; + case "SHOPPING": + case 5: + message.organization = 5; + break; + case "GEO": + case 6: + message.organization = 6; + break; + case "GENERATIVE_AI": + case 7: + message.organization = 7; + break; } if (object.librarySettings) { if (!Array.isArray(object.librarySettings)) @@ -31042,6 +32644,8 @@ message.librarySettings[i] = $root.google.api.ClientLibrarySettings.fromObject(object.librarySettings[i]); } } + if (object.protoReferenceDocumentationUri != null) + message.protoReferenceDocumentationUri = String(object.protoReferenceDocumentationUri); return message; }; @@ -31070,6 +32674,7 @@ object.githubLabel = ""; object.docTagPrefix = ""; object.organization = options.enums === String ? 
"CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED" : 0; + object.protoReferenceDocumentationUri = ""; } if (message.methodSettings && message.methodSettings.length) { object.methodSettings = []; @@ -31098,6 +32703,8 @@ for (var j = 0; j < message.librarySettings.length; ++j) object.librarySettings[j] = $root.google.api.ClientLibrarySettings.toObject(message.librarySettings[j], options); } + if (message.protoReferenceDocumentationUri != null && message.hasOwnProperty("protoReferenceDocumentationUri")) + object.protoReferenceDocumentationUri = message.protoReferenceDocumentationUri; return object; }; @@ -32260,6 +33867,11 @@ * @memberof google.api * @interface IDotnetSettings * @property {google.api.ICommonLanguageSettings|null} [common] DotnetSettings common + * @property {Object.|null} [renamedServices] DotnetSettings renamedServices + * @property {Object.|null} [renamedResources] DotnetSettings renamedResources + * @property {Array.|null} [ignoredResources] DotnetSettings ignoredResources + * @property {Array.|null} [forcedNamespaceAliases] DotnetSettings forcedNamespaceAliases + * @property {Array.|null} [handwrittenSignatures] DotnetSettings handwrittenSignatures */ /** @@ -32271,6 +33883,11 @@ * @param {google.api.IDotnetSettings=} [properties] Properties to set */ function DotnetSettings(properties) { + this.renamedServices = {}; + this.renamedResources = {}; + this.ignoredResources = []; + this.forcedNamespaceAliases = []; + this.handwrittenSignatures = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -32285,6 +33902,46 @@ */ DotnetSettings.prototype.common = null; + /** + * DotnetSettings renamedServices. + * @member {Object.} renamedServices + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.renamedServices = $util.emptyObject; + + /** + * DotnetSettings renamedResources. 
+ * @member {Object.} renamedResources + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.renamedResources = $util.emptyObject; + + /** + * DotnetSettings ignoredResources. + * @member {Array.} ignoredResources + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.ignoredResources = $util.emptyArray; + + /** + * DotnetSettings forcedNamespaceAliases. + * @member {Array.} forcedNamespaceAliases + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.forcedNamespaceAliases = $util.emptyArray; + + /** + * DotnetSettings handwrittenSignatures. + * @member {Array.} handwrittenSignatures + * @memberof google.api.DotnetSettings + * @instance + */ + DotnetSettings.prototype.handwrittenSignatures = $util.emptyArray; + /** * Creates a new DotnetSettings instance using the specified properties. * @function create @@ -32311,6 +33968,21 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.renamedServices != null && Object.hasOwnProperty.call(message, "renamedServices")) + for (var keys = Object.keys(message.renamedServices), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedServices[keys[i]]).ldelim(); + if (message.renamedResources != null && Object.hasOwnProperty.call(message, "renamedResources")) + for (var keys = Object.keys(message.renamedResources), i = 0; i < keys.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedResources[keys[i]]).ldelim(); + if (message.ignoredResources != null && message.ignoredResources.length) + for (var i = 0; i < 
message.ignoredResources.length; ++i) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.ignoredResources[i]); + if (message.forcedNamespaceAliases != null && message.forcedNamespaceAliases.length) + for (var i = 0; i < message.forcedNamespaceAliases.length; ++i) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.forcedNamespaceAliases[i]); + if (message.handwrittenSignatures != null && message.handwrittenSignatures.length) + for (var i = 0; i < message.handwrittenSignatures.length; ++i) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.handwrittenSignatures[i]); return writer; }; @@ -32341,7 +34013,7 @@ DotnetSettings.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.DotnetSettings(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.DotnetSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -32349,6 +34021,70 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + if (message.renamedServices === $util.emptyObject) + message.renamedServices = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.renamedServices[key] = value; + break; + } + case 3: { + if (message.renamedResources === $util.emptyObject) + message.renamedResources = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + 
default: + reader.skipType(tag2 & 7); + break; + } + } + message.renamedResources[key] = value; + break; + } + case 4: { + if (!(message.ignoredResources && message.ignoredResources.length)) + message.ignoredResources = []; + message.ignoredResources.push(reader.string()); + break; + } + case 5: { + if (!(message.forcedNamespaceAliases && message.forcedNamespaceAliases.length)) + message.forcedNamespaceAliases = []; + message.forcedNamespaceAliases.push(reader.string()); + break; + } + case 6: { + if (!(message.handwrittenSignatures && message.handwrittenSignatures.length)) + message.handwrittenSignatures = []; + message.handwrittenSignatures.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -32389,6 +34125,43 @@ if (error) return "common." + error; } + if (message.renamedServices != null && message.hasOwnProperty("renamedServices")) { + if (!$util.isObject(message.renamedServices)) + return "renamedServices: object expected"; + var key = Object.keys(message.renamedServices); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.renamedServices[key[i]])) + return "renamedServices: string{k:string} expected"; + } + if (message.renamedResources != null && message.hasOwnProperty("renamedResources")) { + if (!$util.isObject(message.renamedResources)) + return "renamedResources: object expected"; + var key = Object.keys(message.renamedResources); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.renamedResources[key[i]])) + return "renamedResources: string{k:string} expected"; + } + if (message.ignoredResources != null && message.hasOwnProperty("ignoredResources")) { + if (!Array.isArray(message.ignoredResources)) + return "ignoredResources: array expected"; + for (var i = 0; i < message.ignoredResources.length; ++i) + if (!$util.isString(message.ignoredResources[i])) + return "ignoredResources: string[] expected"; + } + if (message.forcedNamespaceAliases != null && 
message.hasOwnProperty("forcedNamespaceAliases")) { + if (!Array.isArray(message.forcedNamespaceAliases)) + return "forcedNamespaceAliases: array expected"; + for (var i = 0; i < message.forcedNamespaceAliases.length; ++i) + if (!$util.isString(message.forcedNamespaceAliases[i])) + return "forcedNamespaceAliases: string[] expected"; + } + if (message.handwrittenSignatures != null && message.hasOwnProperty("handwrittenSignatures")) { + if (!Array.isArray(message.handwrittenSignatures)) + return "handwrittenSignatures: array expected"; + for (var i = 0; i < message.handwrittenSignatures.length; ++i) + if (!$util.isString(message.handwrittenSignatures[i])) + return "handwrittenSignatures: string[] expected"; + } return null; }; @@ -32409,6 +34182,41 @@ throw TypeError(".google.api.DotnetSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.renamedServices) { + if (typeof object.renamedServices !== "object") + throw TypeError(".google.api.DotnetSettings.renamedServices: object expected"); + message.renamedServices = {}; + for (var keys = Object.keys(object.renamedServices), i = 0; i < keys.length; ++i) + message.renamedServices[keys[i]] = String(object.renamedServices[keys[i]]); + } + if (object.renamedResources) { + if (typeof object.renamedResources !== "object") + throw TypeError(".google.api.DotnetSettings.renamedResources: object expected"); + message.renamedResources = {}; + for (var keys = Object.keys(object.renamedResources), i = 0; i < keys.length; ++i) + message.renamedResources[keys[i]] = String(object.renamedResources[keys[i]]); + } + if (object.ignoredResources) { + if (!Array.isArray(object.ignoredResources)) + throw TypeError(".google.api.DotnetSettings.ignoredResources: array expected"); + message.ignoredResources = []; + for (var i = 0; i < object.ignoredResources.length; ++i) + message.ignoredResources[i] = String(object.ignoredResources[i]); + } + if 
(object.forcedNamespaceAliases) { + if (!Array.isArray(object.forcedNamespaceAliases)) + throw TypeError(".google.api.DotnetSettings.forcedNamespaceAliases: array expected"); + message.forcedNamespaceAliases = []; + for (var i = 0; i < object.forcedNamespaceAliases.length; ++i) + message.forcedNamespaceAliases[i] = String(object.forcedNamespaceAliases[i]); + } + if (object.handwrittenSignatures) { + if (!Array.isArray(object.handwrittenSignatures)) + throw TypeError(".google.api.DotnetSettings.handwrittenSignatures: array expected"); + message.handwrittenSignatures = []; + for (var i = 0; i < object.handwrittenSignatures.length; ++i) + message.handwrittenSignatures[i] = String(object.handwrittenSignatures[i]); + } return message; }; @@ -32425,10 +34233,45 @@ if (!options) options = {}; var object = {}; + if (options.arrays || options.defaults) { + object.ignoredResources = []; + object.forcedNamespaceAliases = []; + object.handwrittenSignatures = []; + } + if (options.objects || options.defaults) { + object.renamedServices = {}; + object.renamedResources = {}; + } if (options.defaults) object.common = null; if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + var keys2; + if (message.renamedServices && (keys2 = Object.keys(message.renamedServices)).length) { + object.renamedServices = {}; + for (var j = 0; j < keys2.length; ++j) + object.renamedServices[keys2[j]] = message.renamedServices[keys2[j]]; + } + if (message.renamedResources && (keys2 = Object.keys(message.renamedResources)).length) { + object.renamedResources = {}; + for (var j = 0; j < keys2.length; ++j) + object.renamedResources[keys2[j]] = message.renamedResources[keys2[j]]; + } + if (message.ignoredResources && message.ignoredResources.length) { + object.ignoredResources = []; + for (var j = 0; j < message.ignoredResources.length; ++j) + object.ignoredResources[j] = message.ignoredResources[j]; + } 
+ if (message.forcedNamespaceAliases && message.forcedNamespaceAliases.length) { + object.forcedNamespaceAliases = []; + for (var j = 0; j < message.forcedNamespaceAliases.length; ++j) + object.forcedNamespaceAliases[j] = message.forcedNamespaceAliases[j]; + } + if (message.handwrittenSignatures && message.handwrittenSignatures.length) { + object.handwrittenSignatures = []; + for (var j = 0; j < message.handwrittenSignatures.length; ++j) + object.handwrittenSignatures[j] = message.handwrittenSignatures[j]; + } return object; }; @@ -33406,6 +35249,9 @@ * @property {number} ADS=2 ADS value * @property {number} PHOTOS=3 PHOTOS value * @property {number} STREET_VIEW=4 STREET_VIEW value + * @property {number} SHOPPING=5 SHOPPING value + * @property {number} GEO=6 GEO value + * @property {number} GENERATIVE_AI=7 GENERATIVE_AI value */ api.ClientLibraryOrganization = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -33414,6 +35260,9 @@ values[valuesById[2] = "ADS"] = 2; values[valuesById[3] = "PHOTOS"] = 3; values[valuesById[4] = "STREET_VIEW"] = 4; + values[valuesById[5] = "SHOPPING"] = 5; + values[valuesById[6] = "GEO"] = 6; + values[valuesById[7] = "GENERATIVE_AI"] = 7; return values; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 00f25be32f8..68dd1adaf1c 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1795,6 +1795,25 @@ "rule": "repeated", "type": "UninterpretedOption", "id": 999 + }, + "declaration": { + "rule": "repeated", + "type": "Declaration", + "id": 2, + "options": { + "retention": "RETENTION_SOURCE" + } + }, + "features": { + "type": "FeatureSet", + "id": 50 + }, + "verification": { + "type": "VerificationState", + "id": 3, + "options": { + "default": "UNVERIFIED" + } } }, "extensions": [ @@ -1802,7 +1821,45 @@ 1000, 536870911 ] - ] + ], + "nested": { + "Declaration": { + "fields": { + 
"number": { + "type": "int32", + "id": 1 + }, + "fullName": { + "type": "string", + "id": 2 + }, + "type": { + "type": "string", + "id": 3 + }, + "reserved": { + "type": "bool", + "id": 5 + }, + "repeated": { + "type": "bool", + "id": 6 + } + }, + "reserved": [ + [ + 4, + 4 + ] + ] + }, + "VerificationState": { + "values": { + "DECLARATION": 0, + "UNVERIFIED": 1 + } + } + } }, "FieldDescriptorProto": { "fields": { @@ -2115,6 +2172,10 @@ "type": "string", "id": 45 }, + "features": { + "type": "FeatureSet", + "id": 50 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2177,6 +2238,10 @@ "deprecated": true } }, + "features": { + "type": "FeatureSet", + "id": 12 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2271,9 +2336,22 @@ "type": "OptionRetention", "id": 17 }, - "target": { + "targets": { + "rule": "repeated", "type": "OptionTargetType", - "id": 18 + "id": 19, + "options": { + "packed": false + } + }, + "editionDefaults": { + "rule": "repeated", + "type": "EditionDefault", + "id": 20 + }, + "features": { + "type": "FeatureSet", + "id": 21 }, "uninterpretedOption": { "rule": "repeated", @@ -2291,6 +2369,10 @@ [ 4, 4 + ], + [ + 18, + 18 ] ], "nested": { @@ -2328,11 +2410,27 @@ "TARGET_TYPE_SERVICE": 8, "TARGET_TYPE_METHOD": 9 } + }, + "EditionDefault": { + "fields": { + "edition": { + "type": "string", + "id": 1 + }, + "value": { + "type": "string", + "id": 2 + } + } } } }, "OneofOptions": { "fields": { + "features": { + "type": "FeatureSet", + "id": 1 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2366,6 +2464,10 @@ "deprecated": true } }, + "features": { + "type": "FeatureSet", + "id": 7 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2394,6 +2496,17 @@ "default": false } }, + "features": { + "type": "FeatureSet", + "id": 2 + }, + "debugRedact": { + "type": "bool", + "id": 3, + "options": { + "default": false + } + }, 
"uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2409,6 +2522,10 @@ }, "ServiceOptions": { "fields": { + "features": { + "type": "FeatureSet", + "id": 34 + }, "deprecated": { "type": "bool", "id": 33, @@ -2445,6 +2562,10 @@ "default": "IDEMPOTENCY_UNKNOWN" } }, + "features": { + "type": "FeatureSet", + "id": 35 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -2516,6 +2637,137 @@ } } }, + "FeatureSet": { + "fields": { + "fieldPresence": { + "type": "FieldPresence", + "id": 1, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "EXPLICIT" + } + }, + "enumType": { + "type": "EnumType", + "id": 2, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "OPEN" + } + }, + "repeatedFieldEncoding": { + "type": "RepeatedFieldEncoding", + "id": 3, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "PACKED" + } + }, + "stringFieldValidation": { + "type": "StringFieldValidation", + "id": 4, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "MANDATORY" + } + }, + "messageEncoding": { + "type": "MessageEncoding", + "id": 5, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "LENGTH_PREFIXED" + } + }, + "jsonFormat": { + "type": "JsonFormat", + "id": 6, + "options": { + "retention": "RETENTION_RUNTIME", + "targets": "TARGET_TYPE_FILE", + "edition_defaults.edition": "2023", + "edition_defaults.value": "ALLOW" + } + }, + "rawFeatures": { + "type": "FeatureSet", + "id": 999, + "options": { + "targets": "TARGET_TYPE_UNKNOWN" + } + } + }, 
+ "extensions": [ + [ + 1000, + 1000 + ], + [ + 1001, + 1001 + ], + [ + 9995, + 9999 + ] + ], + "nested": { + "FieldPresence": { + "values": { + "FIELD_PRESENCE_UNKNOWN": 0, + "EXPLICIT": 1, + "IMPLICIT": 2, + "LEGACY_REQUIRED": 3 + } + }, + "EnumType": { + "values": { + "ENUM_TYPE_UNKNOWN": 0, + "OPEN": 1, + "CLOSED": 2 + } + }, + "RepeatedFieldEncoding": { + "values": { + "REPEATED_FIELD_ENCODING_UNKNOWN": 0, + "PACKED": 1, + "EXPANDED": 2 + } + }, + "StringFieldValidation": { + "values": { + "STRING_FIELD_VALIDATION_UNKNOWN": 0, + "MANDATORY": 1, + "HINT": 2, + "NONE": 3 + } + }, + "MessageEncoding": { + "values": { + "MESSAGE_ENCODING_UNKNOWN": 0, + "LENGTH_PREFIXED": 1, + "DELIMITED": 2 + } + }, + "JsonFormat": { + "values": { + "JSON_FORMAT_UNKNOWN": 0, + "ALLOW": 1, + "LEGACY_BEST_EFFORT": 2 + } + } + } + }, "SourceCodeInfo": { "fields": { "location": { @@ -2929,6 +3181,10 @@ "rule": "repeated", "type": "ClientLibrarySettings", "id": 109 + }, + "protoReferenceDocumentationUri": { + "type": "string", + "id": 110 } } }, @@ -2986,6 +3242,31 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "renamedServices": { + "keyType": "string", + "type": "string", + "id": 2 + }, + "renamedResources": { + "keyType": "string", + "type": "string", + "id": 3 + }, + "ignoredResources": { + "rule": "repeated", + "type": "string", + "id": 4 + }, + "forcedNamespaceAliases": { + "rule": "repeated", + "type": "string", + "id": 5 + }, + "handwrittenSignatures": { + "rule": "repeated", + "type": "string", + "id": 6 } } }, @@ -3045,7 +3326,10 @@ "CLOUD": 1, "ADS": 2, "PHOTOS": 3, - "STREET_VIEW": 4 + "STREET_VIEW": 4, + "SHOPPING": 5, + "GEO": 6, + "GENERATIVE_AI": 7 } }, "ClientLibraryDestination": { From 305b292e4fc5863bc28727e594062159d1d5029c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 27 Sep 2023 13:21:29 -0700 Subject: [PATCH 238/333] chore(main): release 4.2.0 (#365) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.2.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 12 ++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...tadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...tadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 17 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 61d2585abf7..cce71a70342 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [4.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.1.0...v4.2.0) (2023-09-26) + + +### Features + +* Add default_missing_value_interpretation field; indicate KMS_SERVICE_ERROR is retryable ([#347](https://github.com/googleapis/nodejs-bigquery-storage/issues/347)) ([07794c1](https://github.com/googleapis/nodejs-bigquery-storage/commit/07794c12459f850bfc68df55937ca54fec65ea97)) + + +### Bug Fixes + +* Struct field names should not be lowercase ([#364](https://github.com/googleapis/nodejs-bigquery-storage/issues/364)) ([1867062](https://github.com/googleapis/nodejs-bigquery-storage/commit/18670627cbebf57c139036a7a949ace599606eb0)) + ## [4.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.0.0...v4.1.0) (2023-08-11) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 8eac4f7b0b9..38288e21ea0 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.1.0", + "version": "4.2.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index dc7e6a04e83..ed8e3dc17b5 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.1.0", + "version": "4.2.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index e45697aeb8e..e538f1b195b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.1.0", + "version": "4.2.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 177857f71b8..80fbbfc0e61 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.1.0", + "version": "4.2.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 177857f71b8..80fbbfc0e61 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.1.0", + "version": "4.2.0", "language": "TYPESCRIPT", "apis": [ { From 7f23c0bdceb3e76222960076d6571dc85a174c85 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 27 Sep 2023 22:41:40 +0200 Subject: [PATCH 239/333] chore(deps): update dependency sinon to v16 (#371) Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 38288e21ea0..efbfb4cfb58 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -45,7 +45,7 @@ "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^1.0.0-2", - "sinon": "^15.0.0", + "sinon": "^16.0.0", "ts-loader": "^9.0.0", "uuid": "^9.0.0", "typescript": "^5.1.6", From 0e2cc90f07e6fc2002cfc62bc464cde725665ff9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 27 Sep 2023 13:54:21 -0700 Subject: [PATCH 240/333] build: update typescript generator version to 
publish in dual format (ESM) (#367) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: simplify logic for HTTP/1.1 REST fallback option For the `fallback` parameter, all values considered as `true` in Boolean context will enable HTTP/1.1 REST fallback, since the other fallback transport, proto over HTTP, is removed from `google-gax` v4. PiperOrigin-RevId: 559812260 Source-Link: https://github.com/googleapis/googleapis/commit/6a6fd29a79fe2846001d90d93e79a19fcc303b85 Source-Link: https://github.com/googleapis/googleapis-gen/commit/56c16657e7a59122b1da94771a9ef40989c282c0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTZjMTY2NTdlN2E1OTEyMmIxZGE5NDc3MWE5ZWY0MDk4OWMyODJjMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * build: update typescript generator version to publish in dual format (ESM) PiperOrigin-RevId: 568643156 Source-Link: https://github.com/googleapis/googleapis/commit/f95afc063e20a0a61e13b186806ac84b49e329cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/bbd2c49d2e423a8ce5cc85627402d512aeefc58b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmJkMmM0OWQyZTQyM2E4Y2U1Y2M4NTYyNzQwMmQ1MTJhZWVmYzU4YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- .../bigquery-storage/src/v1/big_query_read_client.ts | 9 ++++----- .../bigquery-storage/src/v1/big_query_write_client.ts | 9 ++++----- .../src/v1beta1/big_query_storage_client.ts | 9 ++++----- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 83366b79cfe..e0dbfcad645 100644 --- 
a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -90,8 +90,7 @@ export class BigQueryReadClient { * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you @@ -99,7 +98,7 @@ export class BigQueryReadClient { * HTTP implementation. Load only fallback version and pass it to the constructor: * ``` * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryReadClient({fallback: 'rest'}, gax); + * const client = new BigQueryReadClient({fallback: true}, gax); * ``` */ constructor( @@ -162,7 +161,7 @@ export class BigQueryReadClient { } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { + } else { clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { @@ -197,7 +196,7 @@ export class BigQueryReadClient { this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, - opts.fallback === 'rest' + !!opts.fallback ), }; diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index a2c56ea5ae7..0147a17887c 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -93,8 +93,7 @@ 
export class BigQueryWriteClient { * API remote host. * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you @@ -102,7 +101,7 @@ export class BigQueryWriteClient { * HTTP implementation. Load only fallback version and pass it to the constructor: * ``` * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryWriteClient({fallback: 'rest'}, gax); + * const client = new BigQueryWriteClient({fallback: true}, gax); * ``` */ constructor( @@ -165,7 +164,7 @@ export class BigQueryWriteClient { } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { + } else { clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { @@ -200,7 +199,7 @@ export class BigQueryWriteClient { this.descriptors.stream = { appendRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, - opts.fallback === 'rest' + !!opts.fallback ), }; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 97a19a7a486..afcf322614b 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -95,8 +95,7 @@ export class BigQueryStorageClient { * API remote host. 
* @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you @@ -104,7 +103,7 @@ export class BigQueryStorageClient { * HTTP implementation. Load only fallback version and pass it to the constructor: * ``` * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new BigQueryStorageClient({fallback: 'rest'}, gax); + * const client = new BigQueryStorageClient({fallback: true}, gax); * ``` */ constructor( @@ -167,7 +166,7 @@ export class BigQueryStorageClient { } if (!opts.fallback) { clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest') { + } else { clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); } if (opts.libName && opts.libVersion) { @@ -196,7 +195,7 @@ export class BigQueryStorageClient { this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, - opts.fallback === 'rest' + !!opts.fallback ), }; From b243797feed17a1d44554db55d83545ad1913901 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 27 Sep 2023 23:02:36 +0200 Subject: [PATCH 241/333] chore(deps): update dependency pack-n-play to v2 (#378) Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index efbfb4cfb58..d9a6d2990b9 100644 
--- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -44,7 +44,7 @@ "linkinator": "^5.0.0", "mocha": "^9.2.2", "null-loader": "^4.0.0", - "pack-n-play": "^1.0.0-2", + "pack-n-play": "^2.0.0", "sinon": "^16.0.0", "ts-loader": "^9.0.0", "uuid": "^9.0.0", From 13a55bf4edf26b7fb6ed5f39483c4779228d0316 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 27 Sep 2023 18:36:41 -0400 Subject: [PATCH 242/333] docs: add samples for JSONWriter with Default and Committed streams (#366) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: add samples for JSONWriter with Default and Committed streams * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- handwritten/bigquery-storage/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 220dc7e4183..c944a95422c 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -186,6 +186,8 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | | Append_rows_buffered | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_buffered.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_buffered.js,samples/README.md) | +| Append_rows_json_writer_commited | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_commited.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_commited.js,samples/README.md) | +| Append_rows_json_writer_default | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_default.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_default.js,samples/README.md) | | Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | | Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | | Append_rows_table_to_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_table_to_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_table_to_proto2.js,samples/README.md) | From cf41fdff13bd0aa125d7ed49f9d3204b40b77d26 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 11 Oct 2023 09:49:00 -0400 Subject: [PATCH 243/333] chore(nodejs): Add `system-test/fixtures` to 
`.eslintignore` (#381) * fix: Add `system-test/fixtures` to `.eslintignore` * refactor: Use `**` Source-Link: https://github.com/googleapis/synthtool/commit/b7858ba70e8acabc89d13558a71dd9318a57034a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:abc68a9bbf4fa808b25fa16d3b11141059dc757dbc34f024744bba36c200b40f Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.eslintignore | 1 + handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.eslintignore b/handwritten/bigquery-storage/.eslintignore index ea5b04aebe6..c4a0963e9bd 100644 --- a/handwritten/bigquery-storage/.eslintignore +++ b/handwritten/bigquery-storage/.eslintignore @@ -5,3 +5,4 @@ build/ docs/ protos/ samples/generated/ +system-test/**/fixtures diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 807a8916118..40b49d2bf81 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:8b6a07a38d1583d96b6e251ba208bd4ef0bc2a0cc37471ffc518841651d15bd6 -# created: 2023-09-25T22:18:27.595486267Z + digest: sha256:abc68a9bbf4fa808b25fa16d3b11141059dc757dbc34f024744bba36c200b40f +# created: 2023-10-04T20:56:40.710775365Z From 2f6063a12274ca6aa37f3e0a697511fbc8df22c8 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 2 Nov 2023 18:45:25 +0100 Subject: [PATCH 244/333] chore(deps): update dependency sinon to v17 (#390) Co-authored-by: Lo Ferris <50979514+loferris@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d9a6d2990b9..f9953a2f36f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -45,7 +45,7 @@ "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^2.0.0", - "sinon": "^16.0.0", + "sinon": "^17.0.0", "ts-loader": "^9.0.0", "uuid": "^9.0.0", "typescript": "^5.1.6", From 0a9b1e3b838e143bfafd2547d6ed6126ee082440 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 22 Jan 2024 15:12:55 -0400 Subject: [PATCH 245/333] fix: force import protobufjs/ext/descriptor package (#407) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: lint issue * fix: protobuf import * fix: patch protobuf module declaration * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: add more comments around protobufjs fix --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/package.json | 3 + .../bigquery-storage/protos/protos.d.ts | 282 ++++- handwritten/bigquery-storage/protos/protos.js | 1085 +++++++++++++++-- .../bigquery-storage/protos/protos.json | 118 +- handwritten/bigquery-storage/src/index.ts | 9 + 
.../src/managedwriter/json_writer.ts | 5 +- .../bigquery-storage/src/protobuf/index.ts | 27 + .../system-test/managed_writer_client_test.ts | 7 +- .../bigquery-storage/test/adapt/proto.ts | 6 +- 9 files changed, 1349 insertions(+), 193 deletions(-) create mode 100644 handwritten/bigquery-storage/src/protobuf/index.ts diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index f9953a2f36f..a6c20149ef6 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -29,6 +29,9 @@ "dependencies": { "google-gax": "^4.0.3" }, + "peerDependencies": { + "protobufjs": "^7.2.4" + }, "devDependencies": { "@google-cloud/bigquery": "^7.0.0", "@types/uuid": "^9.0.1", diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 86514321264..ba25aecc488 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -6871,6 +6871,21 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** Edition enum. */ + enum Edition { + EDITION_UNKNOWN = 0, + EDITION_PROTO2 = 998, + EDITION_PROTO3 = 999, + EDITION_2023 = 1000, + EDITION_2024 = 1001, + EDITION_1_TEST_ONLY = 1, + EDITION_2_TEST_ONLY = 2, + EDITION_99997_TEST_ONLY = 99997, + EDITION_99998_TEST_ONLY = 99998, + EDITION_99999_TEST_ONLY = 99999, + EDITION_MAX = 2147483647 + } + /** Properties of a FileDescriptorProto. 
*/ interface IFileDescriptorProto { @@ -6911,7 +6926,7 @@ export namespace google { syntax?: (string|null); /** FileDescriptorProto edition */ - edition?: (string|null); + edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); } /** Represents a FileDescriptorProto. */ @@ -6960,7 +6975,7 @@ export namespace google { public syntax: string; /** FileDescriptorProto edition. */ - public edition: string; + public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); /** * Creates a new FileDescriptorProto instance using the specified properties. @@ -7835,8 +7850,8 @@ export namespace google { /** Label enum. */ enum Label { LABEL_OPTIONAL = 1, - LABEL_REQUIRED = 2, - LABEL_REPEATED = 3 + LABEL_REPEATED = 3, + LABEL_REQUIRED = 2 } } @@ -8548,9 +8563,6 @@ export namespace google { /** FileOptions pyGenericServices */ pyGenericServices?: (boolean|null); - /** FileOptions phpGenericServices */ - phpGenericServices?: (boolean|null); - /** FileOptions deprecated */ deprecated?: (boolean|null); @@ -8627,9 +8639,6 @@ export namespace google { /** FileOptions pyGenericServices. */ public pyGenericServices: boolean; - /** FileOptions phpGenericServices. */ - public phpGenericServices: boolean; - /** FileOptions deprecated. */ public deprecated: boolean; @@ -9106,7 +9115,7 @@ export namespace google { interface IEditionDefault { /** EditionDefault edition */ - edition?: (string|null); + edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); /** EditionDefault value */ value?: (string|null); @@ -9122,7 +9131,7 @@ export namespace google { constructor(properties?: google.protobuf.FieldOptions.IEditionDefault); /** EditionDefault edition. */ - public edition: string; + public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); /** EditionDefault value. 
*/ public value: string; @@ -10042,17 +10051,14 @@ export namespace google { /** FeatureSet repeatedFieldEncoding */ repeatedFieldEncoding?: (google.protobuf.FeatureSet.RepeatedFieldEncoding|keyof typeof google.protobuf.FeatureSet.RepeatedFieldEncoding|null); - /** FeatureSet stringFieldValidation */ - stringFieldValidation?: (google.protobuf.FeatureSet.StringFieldValidation|keyof typeof google.protobuf.FeatureSet.StringFieldValidation|null); + /** FeatureSet utf8Validation */ + utf8Validation?: (google.protobuf.FeatureSet.Utf8Validation|keyof typeof google.protobuf.FeatureSet.Utf8Validation|null); /** FeatureSet messageEncoding */ messageEncoding?: (google.protobuf.FeatureSet.MessageEncoding|keyof typeof google.protobuf.FeatureSet.MessageEncoding|null); /** FeatureSet jsonFormat */ jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); - - /** FeatureSet rawFeatures */ - rawFeatures?: (google.protobuf.IFeatureSet|null); } /** Represents a FeatureSet. */ @@ -10073,8 +10079,8 @@ export namespace google { /** FeatureSet repeatedFieldEncoding. */ public repeatedFieldEncoding: (google.protobuf.FeatureSet.RepeatedFieldEncoding|keyof typeof google.protobuf.FeatureSet.RepeatedFieldEncoding); - /** FeatureSet stringFieldValidation. */ - public stringFieldValidation: (google.protobuf.FeatureSet.StringFieldValidation|keyof typeof google.protobuf.FeatureSet.StringFieldValidation); + /** FeatureSet utf8Validation. */ + public utf8Validation: (google.protobuf.FeatureSet.Utf8Validation|keyof typeof google.protobuf.FeatureSet.Utf8Validation); /** FeatureSet messageEncoding. */ public messageEncoding: (google.protobuf.FeatureSet.MessageEncoding|keyof typeof google.protobuf.FeatureSet.MessageEncoding); @@ -10082,9 +10088,6 @@ export namespace google { /** FeatureSet jsonFormat. */ public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); - /** FeatureSet rawFeatures. 
*/ - public rawFeatures?: (google.protobuf.IFeatureSet|null); - /** * Creates a new FeatureSet instance using the specified properties. * @param [properties] Properties to set @@ -10187,11 +10190,10 @@ export namespace google { EXPANDED = 2 } - /** StringFieldValidation enum. */ - enum StringFieldValidation { - STRING_FIELD_VALIDATION_UNKNOWN = 0, - MANDATORY = 1, - HINT = 2, + /** Utf8Validation enum. */ + enum Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0, + VERIFY = 2, NONE = 3 } @@ -10210,6 +10212,221 @@ export namespace google { } } + /** Properties of a FeatureSetDefaults. */ + interface IFeatureSetDefaults { + + /** FeatureSetDefaults defaults */ + defaults?: (google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault[]|null); + + /** FeatureSetDefaults minimumEdition */ + minimumEdition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSetDefaults maximumEdition */ + maximumEdition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + } + + /** Represents a FeatureSetDefaults. */ + class FeatureSetDefaults implements IFeatureSetDefaults { + + /** + * Constructs a new FeatureSetDefaults. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFeatureSetDefaults); + + /** FeatureSetDefaults defaults. */ + public defaults: google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault[]; + + /** FeatureSetDefaults minimumEdition. */ + public minimumEdition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSetDefaults maximumEdition. */ + public maximumEdition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** + * Creates a new FeatureSetDefaults instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FeatureSetDefaults instance + */ + public static create(properties?: google.protobuf.IFeatureSetDefaults): google.protobuf.FeatureSetDefaults; + + /** + * Encodes the specified FeatureSetDefaults message. Does not implicitly {@link google.protobuf.FeatureSetDefaults.verify|verify} messages. + * @param message FeatureSetDefaults message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFeatureSetDefaults, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSetDefaults message, length delimited. Does not implicitly {@link google.protobuf.FeatureSetDefaults.verify|verify} messages. + * @param message FeatureSetDefaults message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFeatureSetDefaults, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSetDefaults message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSetDefaults + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSetDefaults; + + /** + * Decodes a FeatureSetDefaults message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FeatureSetDefaults + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSetDefaults; + + /** + * Verifies a FeatureSetDefaults message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSetDefaults message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSetDefaults + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSetDefaults; + + /** + * Creates a plain object from a FeatureSetDefaults message. Also converts values to other types if specified. + * @param message FeatureSetDefaults + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSetDefaults, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSetDefaults to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSetDefaults + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace FeatureSetDefaults { + + /** Properties of a FeatureSetEditionDefault. */ + interface IFeatureSetEditionDefault { + + /** FeatureSetEditionDefault edition */ + edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSetEditionDefault features */ + features?: (google.protobuf.IFeatureSet|null); + } + + /** Represents a FeatureSetEditionDefault. 
*/ + class FeatureSetEditionDefault implements IFeatureSetEditionDefault { + + /** + * Constructs a new FeatureSetEditionDefault. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault); + + /** FeatureSetEditionDefault edition. */ + public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSetEditionDefault features. */ + public features?: (google.protobuf.IFeatureSet|null); + + /** + * Creates a new FeatureSetEditionDefault instance using the specified properties. + * @param [properties] Properties to set + * @returns FeatureSetEditionDefault instance + */ + public static create(properties?: google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault): google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault; + + /** + * Encodes the specified FeatureSetEditionDefault message. Does not implicitly {@link google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.verify|verify} messages. + * @param message FeatureSetEditionDefault message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSetEditionDefault message, length delimited. Does not implicitly {@link google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.verify|verify} messages. + * @param message FeatureSetEditionDefault message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSetEditionDefault message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSetEditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault; + + /** + * Decodes a FeatureSetEditionDefault message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FeatureSetEditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault; + + /** + * Verifies a FeatureSetEditionDefault message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSetEditionDefault message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSetEditionDefault + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault; + + /** + * Creates a plain object from a FeatureSetEditionDefault message. Also converts values to other types if specified. + * @param message FeatureSetEditionDefault + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSetEditionDefault to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSetEditionDefault + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Properties of a SourceCodeInfo. */ interface ISourceCodeInfo { @@ -13536,6 +13753,9 @@ export namespace google { /** MethodSettings longRunning */ longRunning?: (google.api.MethodSettings.ILongRunning|null); + + /** MethodSettings autoPopulatedFields */ + autoPopulatedFields?: (string[]|null); } /** Represents a MethodSettings. */ @@ -13553,6 +13773,9 @@ export namespace google { /** MethodSettings longRunning. */ public longRunning?: (google.api.MethodSettings.ILongRunning|null); + /** MethodSettings autoPopulatedFields. */ + public autoPopulatedFields: string[]; + /** * Creates a new MethodSettings instance using the specified properties. * @param [properties] Properties to set @@ -13789,7 +14012,8 @@ export namespace google { INPUT_ONLY = 4, IMMUTABLE = 5, UNORDERED_LIST = 6, - NON_EMPTY_DEFAULT = 7 + NON_EMPTY_DEFAULT = 7, + IDENTIFIER = 8 } /** Properties of a ResourceDescriptor. */ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 71a78d7ed87..a42af655309 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16703,6 +16703,38 @@ return FileDescriptorSet; })(); + /** + * Edition enum. 
+ * @name google.protobuf.Edition + * @enum {number} + * @property {number} EDITION_UNKNOWN=0 EDITION_UNKNOWN value + * @property {number} EDITION_PROTO2=998 EDITION_PROTO2 value + * @property {number} EDITION_PROTO3=999 EDITION_PROTO3 value + * @property {number} EDITION_2023=1000 EDITION_2023 value + * @property {number} EDITION_2024=1001 EDITION_2024 value + * @property {number} EDITION_1_TEST_ONLY=1 EDITION_1_TEST_ONLY value + * @property {number} EDITION_2_TEST_ONLY=2 EDITION_2_TEST_ONLY value + * @property {number} EDITION_99997_TEST_ONLY=99997 EDITION_99997_TEST_ONLY value + * @property {number} EDITION_99998_TEST_ONLY=99998 EDITION_99998_TEST_ONLY value + * @property {number} EDITION_99999_TEST_ONLY=99999 EDITION_99999_TEST_ONLY value + * @property {number} EDITION_MAX=2147483647 EDITION_MAX value + */ + protobuf.Edition = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "EDITION_UNKNOWN"] = 0; + values[valuesById[998] = "EDITION_PROTO2"] = 998; + values[valuesById[999] = "EDITION_PROTO3"] = 999; + values[valuesById[1000] = "EDITION_2023"] = 1000; + values[valuesById[1001] = "EDITION_2024"] = 1001; + values[valuesById[1] = "EDITION_1_TEST_ONLY"] = 1; + values[valuesById[2] = "EDITION_2_TEST_ONLY"] = 2; + values[valuesById[99997] = "EDITION_99997_TEST_ONLY"] = 99997; + values[valuesById[99998] = "EDITION_99998_TEST_ONLY"] = 99998; + values[valuesById[99999] = "EDITION_99999_TEST_ONLY"] = 99999; + values[valuesById[2147483647] = "EDITION_MAX"] = 2147483647; + return values; + })(); + protobuf.FileDescriptorProto = (function() { /** @@ -16721,7 +16753,7 @@ * @property {google.protobuf.IFileOptions|null} [options] FileDescriptorProto options * @property {google.protobuf.ISourceCodeInfo|null} [sourceCodeInfo] FileDescriptorProto sourceCodeInfo * @property {string|null} [syntax] FileDescriptorProto syntax - * @property {string|null} [edition] FileDescriptorProto edition + * @property {google.protobuf.Edition|null} 
[edition] FileDescriptorProto edition */ /** @@ -16844,11 +16876,11 @@ /** * FileDescriptorProto edition. - * @member {string} edition + * @member {google.protobuf.Edition} edition * @memberof google.protobuf.FileDescriptorProto * @instance */ - FileDescriptorProto.prototype.edition = ""; + FileDescriptorProto.prototype.edition = 0; /** * Creates a new FileDescriptorProto instance using the specified properties. @@ -16906,7 +16938,7 @@ if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) - writer.uint32(/* id 13, wireType 2 =*/106).string(message.edition); + writer.uint32(/* id 14, wireType 0 =*/112).int32(message.edition); return writer; }; @@ -17013,8 +17045,8 @@ message.syntax = reader.string(); break; } - case 13: { - message.edition = reader.string(); + case 14: { + message.edition = reader.int32(); break; } default: @@ -17129,8 +17161,22 @@ if (!$util.isString(message.syntax)) return "syntax: string expected"; if (message.edition != null && message.hasOwnProperty("edition")) - if (!$util.isString(message.edition)) - return "edition: string expected"; + switch (message.edition) { + default: + return "edition: enum value expected"; + case 0: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } return null; }; @@ -17223,8 +17269,58 @@ } if (object.syntax != null) message.syntax = String(object.syntax); - if (object.edition != null) - message.edition = String(object.edition); + switch (object.edition) { + default: + if (typeof object.edition === "number") { + message.edition = object.edition; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.edition = 0; + break; + case "EDITION_PROTO2": + case 998: + message.edition = 998; + break; + case "EDITION_PROTO3": + case 999: + message.edition = 
999; + break; + case "EDITION_2023": + case 1000: + message.edition = 1000; + break; + case "EDITION_2024": + case 1001: + message.edition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.edition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.edition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.edition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.edition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.edition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.edition = 2147483647; + break; + } return message; }; @@ -17256,7 +17352,7 @@ object.options = null; object.sourceCodeInfo = null; object.syntax = ""; - object.edition = ""; + object.edition = options.enums === String ? "EDITION_UNKNOWN" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -17304,7 +17400,7 @@ if (message.syntax != null && message.hasOwnProperty("syntax")) object.syntax = message.syntax; if (message.edition != null && message.hasOwnProperty("edition")) - object.edition = message.edition; + object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? 
message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; return object; }; @@ -19343,8 +19439,8 @@ default: return "label: enum value expected"; case 1: - case 2: case 3: + case 2: break; } if (message.type != null && message.hasOwnProperty("type")) @@ -19424,14 +19520,14 @@ case 1: message.label = 1; break; - case "LABEL_REQUIRED": - case 2: - message.label = 2; - break; case "LABEL_REPEATED": case 3: message.label = 3; break; + case "LABEL_REQUIRED": + case 2: + message.label = 2; + break; } switch (object.type) { default: @@ -19661,14 +19757,14 @@ * @name google.protobuf.FieldDescriptorProto.Label * @enum {number} * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value - * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value + * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value */ FieldDescriptorProto.Label = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[1] = "LABEL_OPTIONAL"] = 1; - values[valuesById[2] = "LABEL_REQUIRED"] = 2; values[valuesById[3] = "LABEL_REPEATED"] = 3; + values[valuesById[2] = "LABEL_REQUIRED"] = 2; return values; })(); @@ -21367,7 +21463,6 @@ * @property {boolean|null} [ccGenericServices] FileOptions ccGenericServices * @property {boolean|null} [javaGenericServices] FileOptions javaGenericServices * @property {boolean|null} [pyGenericServices] FileOptions pyGenericServices - * @property {boolean|null} [phpGenericServices] FileOptions phpGenericServices * @property {boolean|null} [deprecated] FileOptions deprecated * @property {boolean|null} [ccEnableArenas] FileOptions ccEnableArenas * @property {string|null} [objcClassPrefix] FileOptions objcClassPrefix @@ -21479,14 +21574,6 @@ */ FileOptions.prototype.pyGenericServices = false; - /** - * FileOptions phpGenericServices. 
- * @member {boolean} phpGenericServices - * @memberof google.protobuf.FileOptions - * @instance - */ - FileOptions.prototype.phpGenericServices = false; - /** * FileOptions deprecated. * @member {boolean} deprecated @@ -21641,8 +21728,6 @@ writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); - if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) - writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); if (message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) @@ -21729,10 +21814,6 @@ message.pyGenericServices = reader.bool(); break; } - case 42: { - message.phpGenericServices = reader.bool(); - break; - } case 23: { message.deprecated = reader.bool(); break; @@ -21856,9 +21937,6 @@ if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) if (typeof message.pyGenericServices !== "boolean") return "pyGenericServices: boolean expected"; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - if (typeof message.phpGenericServices !== "boolean") - return "phpGenericServices: boolean expected"; if (message.deprecated != null && message.hasOwnProperty("deprecated")) if (typeof message.deprecated !== "boolean") return "deprecated: boolean expected"; @@ -21962,8 +22040,6 @@ message.javaGenericServices = Boolean(object.javaGenericServices); if (object.pyGenericServices != null) message.pyGenericServices = Boolean(object.pyGenericServices); - if (object.phpGenericServices != null) - message.phpGenericServices = Boolean(object.phpGenericServices); if 
(object.deprecated != null) message.deprecated = Boolean(object.deprecated); if (object.ccEnableArenas != null) @@ -22045,7 +22121,6 @@ object.swiftPrefix = ""; object.phpClassPrefix = ""; object.phpNamespace = ""; - object.phpGenericServices = false; object.phpMetadataNamespace = ""; object.rubyPackage = ""; object.features = null; @@ -22084,8 +22159,6 @@ object.phpClassPrefix = message.phpClassPrefix; if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) object.phpNamespace = message.phpNamespace; - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) - object.phpGenericServices = message.phpGenericServices; if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) object.phpMetadataNamespace = message.phpMetadataNamespace; if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) @@ -23038,6 +23111,7 @@ case 5: case 6: case 7: + case 8: break; } } @@ -23256,6 +23330,10 @@ case 7: message[".google.api.fieldBehavior"][i] = 7; break; + case "IDENTIFIER": + case 8: + message[".google.api.fieldBehavior"][i] = 8; + break; } } if (object[".google.api.resourceReference"] != null) { @@ -23456,7 +23534,7 @@ * Properties of an EditionDefault. * @memberof google.protobuf.FieldOptions * @interface IEditionDefault - * @property {string|null} [edition] EditionDefault edition + * @property {google.protobuf.Edition|null} [edition] EditionDefault edition * @property {string|null} [value] EditionDefault value */ @@ -23477,11 +23555,11 @@ /** * EditionDefault edition. - * @member {string} edition + * @member {google.protobuf.Edition} edition * @memberof google.protobuf.FieldOptions.EditionDefault * @instance */ - EditionDefault.prototype.edition = ""; + EditionDefault.prototype.edition = 0; /** * EditionDefault value. 
@@ -23515,10 +23593,10 @@ EditionDefault.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.edition); if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.value); + if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); return writer; }; @@ -23553,8 +23631,8 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: { - message.edition = reader.string(); + case 3: { + message.edition = reader.int32(); break; } case 2: { @@ -23597,8 +23675,22 @@ if (typeof message !== "object" || message === null) return "object expected"; if (message.edition != null && message.hasOwnProperty("edition")) - if (!$util.isString(message.edition)) - return "edition: string expected"; + switch (message.edition) { + default: + return "edition: enum value expected"; + case 0: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } if (message.value != null && message.hasOwnProperty("value")) if (!$util.isString(message.value)) return "value: string expected"; @@ -23617,8 +23709,58 @@ if (object instanceof $root.google.protobuf.FieldOptions.EditionDefault) return object; var message = new $root.google.protobuf.FieldOptions.EditionDefault(); - if (object.edition != null) - message.edition = String(object.edition); + switch (object.edition) { + default: + if (typeof object.edition === "number") { + message.edition = object.edition; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.edition = 0; + break; + case "EDITION_PROTO2": + case 998: + message.edition = 998; + break; + case "EDITION_PROTO3": + case 999: + message.edition 
= 999; + break; + case "EDITION_2023": + case 1000: + message.edition = 1000; + break; + case "EDITION_2024": + case 1001: + message.edition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.edition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.edition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.edition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.edition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.edition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.edition = 2147483647; + break; + } if (object.value != null) message.value = String(object.value); return message; @@ -23638,13 +23780,13 @@ options = {}; var object = {}; if (options.defaults) { - object.edition = ""; object.value = ""; + object.edition = options.enums === String ? "EDITION_UNKNOWN" : 0; } - if (message.edition != null && message.hasOwnProperty("edition")) - object.edition = message.edition; if (message.value != null && message.hasOwnProperty("value")) object.value = message.value; + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? 
message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; return object; }; @@ -25924,10 +26066,9 @@ * @property {google.protobuf.FeatureSet.FieldPresence|null} [fieldPresence] FeatureSet fieldPresence * @property {google.protobuf.FeatureSet.EnumType|null} [enumType] FeatureSet enumType * @property {google.protobuf.FeatureSet.RepeatedFieldEncoding|null} [repeatedFieldEncoding] FeatureSet repeatedFieldEncoding - * @property {google.protobuf.FeatureSet.StringFieldValidation|null} [stringFieldValidation] FeatureSet stringFieldValidation + * @property {google.protobuf.FeatureSet.Utf8Validation|null} [utf8Validation] FeatureSet utf8Validation * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat - * @property {google.protobuf.IFeatureSet|null} [rawFeatures] FeatureSet rawFeatures */ /** @@ -25970,12 +26111,12 @@ FeatureSet.prototype.repeatedFieldEncoding = 0; /** - * FeatureSet stringFieldValidation. - * @member {google.protobuf.FeatureSet.StringFieldValidation} stringFieldValidation + * FeatureSet utf8Validation. + * @member {google.protobuf.FeatureSet.Utf8Validation} utf8Validation * @memberof google.protobuf.FeatureSet * @instance */ - FeatureSet.prototype.stringFieldValidation = 0; + FeatureSet.prototype.utf8Validation = 0; /** * FeatureSet messageEncoding. @@ -25993,14 +26134,6 @@ */ FeatureSet.prototype.jsonFormat = 0; - /** - * FeatureSet rawFeatures. - * @member {google.protobuf.IFeatureSet|null|undefined} rawFeatures - * @memberof google.protobuf.FeatureSet - * @instance - */ - FeatureSet.prototype.rawFeatures = null; - /** * Creates a new FeatureSet instance using the specified properties. 
* @function create @@ -26031,14 +26164,12 @@ writer.uint32(/* id 2, wireType 0 =*/16).int32(message.enumType); if (message.repeatedFieldEncoding != null && Object.hasOwnProperty.call(message, "repeatedFieldEncoding")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.repeatedFieldEncoding); - if (message.stringFieldValidation != null && Object.hasOwnProperty.call(message, "stringFieldValidation")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.stringFieldValidation); + if (message.utf8Validation != null && Object.hasOwnProperty.call(message, "utf8Validation")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.utf8Validation); if (message.messageEncoding != null && Object.hasOwnProperty.call(message, "messageEncoding")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); - if (message.rawFeatures != null && Object.hasOwnProperty.call(message, "rawFeatures")) - $root.google.protobuf.FeatureSet.encode(message.rawFeatures, writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); return writer; }; @@ -26086,7 +26217,7 @@ break; } case 4: { - message.stringFieldValidation = reader.int32(); + message.utf8Validation = reader.int32(); break; } case 5: { @@ -26097,10 +26228,6 @@ message.jsonFormat = reader.int32(); break; } - case 999: { - message.rawFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); - break; - } default: reader.skipType(tag & 7); break; @@ -26164,12 +26291,11 @@ case 2: break; } - if (message.stringFieldValidation != null && message.hasOwnProperty("stringFieldValidation")) - switch (message.stringFieldValidation) { + if (message.utf8Validation != null && message.hasOwnProperty("utf8Validation")) + switch (message.utf8Validation) { default: - return "stringFieldValidation: enum value expected"; + return "utf8Validation: enum value 
expected"; case 0: - case 1: case 2: case 3: break; @@ -26192,11 +26318,6 @@ case 2: break; } - if (message.rawFeatures != null && message.hasOwnProperty("rawFeatures")) { - var error = $root.google.protobuf.FeatureSet.verify(message.rawFeatures); - if (error) - return "rawFeatures." + error; - } return null; }; @@ -26276,28 +26397,24 @@ message.repeatedFieldEncoding = 2; break; } - switch (object.stringFieldValidation) { + switch (object.utf8Validation) { default: - if (typeof object.stringFieldValidation === "number") { - message.stringFieldValidation = object.stringFieldValidation; + if (typeof object.utf8Validation === "number") { + message.utf8Validation = object.utf8Validation; break; } break; - case "STRING_FIELD_VALIDATION_UNKNOWN": + case "UTF8_VALIDATION_UNKNOWN": case 0: - message.stringFieldValidation = 0; - break; - case "MANDATORY": - case 1: - message.stringFieldValidation = 1; + message.utf8Validation = 0; break; - case "HINT": + case "VERIFY": case 2: - message.stringFieldValidation = 2; + message.utf8Validation = 2; break; case "NONE": case 3: - message.stringFieldValidation = 3; + message.utf8Validation = 3; break; } switch (object.messageEncoding) { @@ -26340,11 +26457,6 @@ message.jsonFormat = 2; break; } - if (object.rawFeatures != null) { - if (typeof object.rawFeatures !== "object") - throw TypeError(".google.protobuf.FeatureSet.rawFeatures: object expected"); - message.rawFeatures = $root.google.protobuf.FeatureSet.fromObject(object.rawFeatures); - } return message; }; @@ -26365,10 +26477,9 @@ object.fieldPresence = options.enums === String ? "FIELD_PRESENCE_UNKNOWN" : 0; object.enumType = options.enums === String ? "ENUM_TYPE_UNKNOWN" : 0; object.repeatedFieldEncoding = options.enums === String ? "REPEATED_FIELD_ENCODING_UNKNOWN" : 0; - object.stringFieldValidation = options.enums === String ? "STRING_FIELD_VALIDATION_UNKNOWN" : 0; + object.utf8Validation = options.enums === String ? 
"UTF8_VALIDATION_UNKNOWN" : 0; object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; - object.rawFeatures = null; } if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; @@ -26376,14 +26487,12 @@ object.enumType = options.enums === String ? $root.google.protobuf.FeatureSet.EnumType[message.enumType] === undefined ? message.enumType : $root.google.protobuf.FeatureSet.EnumType[message.enumType] : message.enumType; if (message.repeatedFieldEncoding != null && message.hasOwnProperty("repeatedFieldEncoding")) object.repeatedFieldEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.RepeatedFieldEncoding[message.repeatedFieldEncoding] === undefined ? message.repeatedFieldEncoding : $root.google.protobuf.FeatureSet.RepeatedFieldEncoding[message.repeatedFieldEncoding] : message.repeatedFieldEncoding; - if (message.stringFieldValidation != null && message.hasOwnProperty("stringFieldValidation")) - object.stringFieldValidation = options.enums === String ? $root.google.protobuf.FeatureSet.StringFieldValidation[message.stringFieldValidation] === undefined ? message.stringFieldValidation : $root.google.protobuf.FeatureSet.StringFieldValidation[message.stringFieldValidation] : message.stringFieldValidation; + if (message.utf8Validation != null && message.hasOwnProperty("utf8Validation")) + object.utf8Validation = options.enums === String ? $root.google.protobuf.FeatureSet.Utf8Validation[message.utf8Validation] === undefined ? 
message.utf8Validation : $root.google.protobuf.FeatureSet.Utf8Validation[message.utf8Validation] : message.utf8Validation; if (message.messageEncoding != null && message.hasOwnProperty("messageEncoding")) object.messageEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; - if (message.rawFeatures != null && message.hasOwnProperty("rawFeatures")) - object.rawFeatures = $root.google.protobuf.FeatureSet.toObject(message.rawFeatures, options); return object; }; @@ -26464,19 +26573,17 @@ })(); /** - * StringFieldValidation enum. - * @name google.protobuf.FeatureSet.StringFieldValidation + * Utf8Validation enum. 
+ * @name google.protobuf.FeatureSet.Utf8Validation * @enum {number} - * @property {number} STRING_FIELD_VALIDATION_UNKNOWN=0 STRING_FIELD_VALIDATION_UNKNOWN value - * @property {number} MANDATORY=1 MANDATORY value - * @property {number} HINT=2 HINT value + * @property {number} UTF8_VALIDATION_UNKNOWN=0 UTF8_VALIDATION_UNKNOWN value + * @property {number} VERIFY=2 VERIFY value * @property {number} NONE=3 NONE value */ - FeatureSet.StringFieldValidation = (function() { + FeatureSet.Utf8Validation = (function() { var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "STRING_FIELD_VALIDATION_UNKNOWN"] = 0; - values[valuesById[1] = "MANDATORY"] = 1; - values[valuesById[2] = "HINT"] = 2; + values[valuesById[0] = "UTF8_VALIDATION_UNKNOWN"] = 0; + values[valuesById[2] = "VERIFY"] = 2; values[valuesById[3] = "NONE"] = 3; return values; })(); @@ -26516,6 +26623,702 @@ return FeatureSet; })(); + protobuf.FeatureSetDefaults = (function() { + + /** + * Properties of a FeatureSetDefaults. + * @memberof google.protobuf + * @interface IFeatureSetDefaults + * @property {Array.|null} [defaults] FeatureSetDefaults defaults + * @property {google.protobuf.Edition|null} [minimumEdition] FeatureSetDefaults minimumEdition + * @property {google.protobuf.Edition|null} [maximumEdition] FeatureSetDefaults maximumEdition + */ + + /** + * Constructs a new FeatureSetDefaults. + * @memberof google.protobuf + * @classdesc Represents a FeatureSetDefaults. + * @implements IFeatureSetDefaults + * @constructor + * @param {google.protobuf.IFeatureSetDefaults=} [properties] Properties to set + */ + function FeatureSetDefaults(properties) { + this.defaults = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FeatureSetDefaults defaults. 
+ * @member {Array.} defaults + * @memberof google.protobuf.FeatureSetDefaults + * @instance + */ + FeatureSetDefaults.prototype.defaults = $util.emptyArray; + + /** + * FeatureSetDefaults minimumEdition. + * @member {google.protobuf.Edition} minimumEdition + * @memberof google.protobuf.FeatureSetDefaults + * @instance + */ + FeatureSetDefaults.prototype.minimumEdition = 0; + + /** + * FeatureSetDefaults maximumEdition. + * @member {google.protobuf.Edition} maximumEdition + * @memberof google.protobuf.FeatureSetDefaults + * @instance + */ + FeatureSetDefaults.prototype.maximumEdition = 0; + + /** + * Creates a new FeatureSetDefaults instance using the specified properties. + * @function create + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {google.protobuf.IFeatureSetDefaults=} [properties] Properties to set + * @returns {google.protobuf.FeatureSetDefaults} FeatureSetDefaults instance + */ + FeatureSetDefaults.create = function create(properties) { + return new FeatureSetDefaults(properties); + }; + + /** + * Encodes the specified FeatureSetDefaults message. Does not implicitly {@link google.protobuf.FeatureSetDefaults.verify|verify} messages. 
+ * @function encode + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {google.protobuf.IFeatureSetDefaults} message FeatureSetDefaults message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSetDefaults.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.defaults != null && message.defaults.length) + for (var i = 0; i < message.defaults.length; ++i) + $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.encode(message.defaults[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.minimumEdition != null && Object.hasOwnProperty.call(message, "minimumEdition")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.minimumEdition); + if (message.maximumEdition != null && Object.hasOwnProperty.call(message, "maximumEdition")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.maximumEdition); + return writer; + }; + + /** + * Encodes the specified FeatureSetDefaults message, length delimited. Does not implicitly {@link google.protobuf.FeatureSetDefaults.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {google.protobuf.IFeatureSetDefaults} message FeatureSetDefaults message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSetDefaults.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSetDefaults message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSetDefaults} FeatureSetDefaults + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSetDefaults.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSetDefaults(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.defaults && message.defaults.length)) + message.defaults = []; + message.defaults.push($root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.decode(reader, reader.uint32())); + break; + } + case 4: { + message.minimumEdition = reader.int32(); + break; + } + case 5: { + message.maximumEdition = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FeatureSetDefaults message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSetDefaults} FeatureSetDefaults + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSetDefaults.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSetDefaults message. 
+ * @function verify + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSetDefaults.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.defaults != null && message.hasOwnProperty("defaults")) { + if (!Array.isArray(message.defaults)) + return "defaults: array expected"; + for (var i = 0; i < message.defaults.length; ++i) { + var error = $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.verify(message.defaults[i]); + if (error) + return "defaults." + error; + } + } + if (message.minimumEdition != null && message.hasOwnProperty("minimumEdition")) + switch (message.minimumEdition) { + default: + return "minimumEdition: enum value expected"; + case 0: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.maximumEdition != null && message.hasOwnProperty("maximumEdition")) + switch (message.maximumEdition) { + default: + return "maximumEdition: enum value expected"; + case 0: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + return null; + }; + + /** + * Creates a FeatureSetDefaults message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSetDefaults} FeatureSetDefaults + */ + FeatureSetDefaults.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSetDefaults) + return object; + var message = new $root.google.protobuf.FeatureSetDefaults(); + if (object.defaults) { + if (!Array.isArray(object.defaults)) + throw TypeError(".google.protobuf.FeatureSetDefaults.defaults: array expected"); + message.defaults = []; + for (var i = 0; i < object.defaults.length; ++i) { + if (typeof object.defaults[i] !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.defaults: object expected"); + message.defaults[i] = $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fromObject(object.defaults[i]); + } + } + switch (object.minimumEdition) { + default: + if (typeof object.minimumEdition === "number") { + message.minimumEdition = object.minimumEdition; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.minimumEdition = 0; + break; + case "EDITION_PROTO2": + case 998: + message.minimumEdition = 998; + break; + case "EDITION_PROTO3": + case 999: + message.minimumEdition = 999; + break; + case "EDITION_2023": + case 1000: + message.minimumEdition = 1000; + break; + case "EDITION_2024": + case 1001: + message.minimumEdition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.minimumEdition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.minimumEdition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.minimumEdition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.minimumEdition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.minimumEdition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.minimumEdition = 2147483647; + break; + } + switch 
(object.maximumEdition) { + default: + if (typeof object.maximumEdition === "number") { + message.maximumEdition = object.maximumEdition; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.maximumEdition = 0; + break; + case "EDITION_PROTO2": + case 998: + message.maximumEdition = 998; + break; + case "EDITION_PROTO3": + case 999: + message.maximumEdition = 999; + break; + case "EDITION_2023": + case 1000: + message.maximumEdition = 1000; + break; + case "EDITION_2024": + case 1001: + message.maximumEdition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.maximumEdition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.maximumEdition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.maximumEdition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.maximumEdition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.maximumEdition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.maximumEdition = 2147483647; + break; + } + return message; + }; + + /** + * Creates a plain object from a FeatureSetDefaults message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {google.protobuf.FeatureSetDefaults} message FeatureSetDefaults + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FeatureSetDefaults.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.defaults = []; + if (options.defaults) { + object.minimumEdition = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.maximumEdition = options.enums === String ? 
"EDITION_UNKNOWN" : 0; + } + if (message.defaults && message.defaults.length) { + object.defaults = []; + for (var j = 0; j < message.defaults.length; ++j) + object.defaults[j] = $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.toObject(message.defaults[j], options); + } + if (message.minimumEdition != null && message.hasOwnProperty("minimumEdition")) + object.minimumEdition = options.enums === String ? $root.google.protobuf.Edition[message.minimumEdition] === undefined ? message.minimumEdition : $root.google.protobuf.Edition[message.minimumEdition] : message.minimumEdition; + if (message.maximumEdition != null && message.hasOwnProperty("maximumEdition")) + object.maximumEdition = options.enums === String ? $root.google.protobuf.Edition[message.maximumEdition] === undefined ? message.maximumEdition : $root.google.protobuf.Edition[message.maximumEdition] : message.maximumEdition; + return object; + }; + + /** + * Converts this FeatureSetDefaults to JSON. + * @function toJSON + * @memberof google.protobuf.FeatureSetDefaults + * @instance + * @returns {Object.} JSON object + */ + FeatureSetDefaults.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FeatureSetDefaults + * @function getTypeUrl + * @memberof google.protobuf.FeatureSetDefaults + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FeatureSetDefaults.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSetDefaults"; + }; + + FeatureSetDefaults.FeatureSetEditionDefault = (function() { + + /** + * Properties of a FeatureSetEditionDefault. 
+ * @memberof google.protobuf.FeatureSetDefaults + * @interface IFeatureSetEditionDefault + * @property {google.protobuf.Edition|null} [edition] FeatureSetEditionDefault edition + * @property {google.protobuf.IFeatureSet|null} [features] FeatureSetEditionDefault features + */ + + /** + * Constructs a new FeatureSetEditionDefault. + * @memberof google.protobuf.FeatureSetDefaults + * @classdesc Represents a FeatureSetEditionDefault. + * @implements IFeatureSetEditionDefault + * @constructor + * @param {google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault=} [properties] Properties to set + */ + function FeatureSetEditionDefault(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FeatureSetEditionDefault edition. + * @member {google.protobuf.Edition} edition + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + */ + FeatureSetEditionDefault.prototype.edition = 0; + + /** + * FeatureSetEditionDefault features. + * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + */ + FeatureSetEditionDefault.prototype.features = null; + + /** + * Creates a new FeatureSetEditionDefault instance using the specified properties. + * @function create + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault=} [properties] Properties to set + * @returns {google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault} FeatureSetEditionDefault instance + */ + FeatureSetEditionDefault.create = function create(properties) { + return new FeatureSetEditionDefault(properties); + }; + + /** + * Encodes the specified FeatureSetEditionDefault message. 
Does not implicitly {@link google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault} message FeatureSetEditionDefault message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSetEditionDefault.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); + return writer; + }; + + /** + * Encodes the specified FeatureSetEditionDefault message, length delimited. Does not implicitly {@link google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {google.protobuf.FeatureSetDefaults.IFeatureSetEditionDefault} message FeatureSetEditionDefault message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSetEditionDefault.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSetEditionDefault message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault} FeatureSetEditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSetEditionDefault.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + message.edition = reader.int32(); + break; + } + case 2: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FeatureSetEditionDefault message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault} FeatureSetEditionDefault + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSetEditionDefault.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSetEditionDefault message. 
+ * @function verify + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSetEditionDefault.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.edition != null && message.hasOwnProperty("edition")) + switch (message.edition) { + default: + return "edition: enum value expected"; + case 0: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (error) + return "features." + error; + } + return null; + }; + + /** + * Creates a FeatureSetEditionDefault message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault} FeatureSetEditionDefault + */ + FeatureSetEditionDefault.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault) + return object; + var message = new $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault(); + switch (object.edition) { + default: + if (typeof object.edition === "number") { + message.edition = object.edition; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.edition = 0; + break; + case "EDITION_PROTO2": + case 998: + message.edition = 998; + break; + case "EDITION_PROTO3": + case 999: + message.edition = 999; + break; + case "EDITION_2023": + case 1000: + message.edition = 1000; + break; + case "EDITION_2024": + case 1001: + message.edition = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.edition = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.edition = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.edition = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.edition = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.edition = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.edition = 2147483647; + break; + } + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + } + return message; + }; + + /** + * Creates a plain object from a FeatureSetEditionDefault message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault} message FeatureSetEditionDefault + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FeatureSetEditionDefault.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.features = null; + object.edition = options.enums === String ? "EDITION_UNKNOWN" : 0; + } + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); + if (message.edition != null && message.hasOwnProperty("edition")) + object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + return object; + }; + + /** + * Converts this FeatureSetEditionDefault to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + * @returns {Object.} JSON object + */ + FeatureSetEditionDefault.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FeatureSetEditionDefault + * @function getTypeUrl + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FeatureSetEditionDefault.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault"; + }; + + return FeatureSetEditionDefault; + })(); + + return FeatureSetDefaults; + })(); + protobuf.SourceCodeInfo = (function() { /** @@ -34728,6 +35531,7 @@ * @interface IMethodSettings * @property {string|null} [selector] MethodSettings selector * @property {google.api.MethodSettings.ILongRunning|null} [longRunning] MethodSettings longRunning + * @property {Array.|null} [autoPopulatedFields] MethodSettings autoPopulatedFields */ /** @@ -34739,6 +35543,7 @@ * @param {google.api.IMethodSettings=} [properties] Properties to set */ function MethodSettings(properties) { + this.autoPopulatedFields = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -34761,6 +35566,14 @@ */ MethodSettings.prototype.longRunning = null; + /** + * MethodSettings autoPopulatedFields. + * @member {Array.} autoPopulatedFields + * @memberof google.api.MethodSettings + * @instance + */ + MethodSettings.prototype.autoPopulatedFields = $util.emptyArray; + /** * Creates a new MethodSettings instance using the specified properties. 
* @function create @@ -34789,6 +35602,9 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); if (message.longRunning != null && Object.hasOwnProperty.call(message, "longRunning")) $root.google.api.MethodSettings.LongRunning.encode(message.longRunning, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.autoPopulatedFields != null && message.autoPopulatedFields.length) + for (var i = 0; i < message.autoPopulatedFields.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.autoPopulatedFields[i]); return writer; }; @@ -34831,6 +35647,12 @@ message.longRunning = $root.google.api.MethodSettings.LongRunning.decode(reader, reader.uint32()); break; } + case 3: { + if (!(message.autoPopulatedFields && message.autoPopulatedFields.length)) + message.autoPopulatedFields = []; + message.autoPopulatedFields.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -34874,6 +35696,13 @@ if (error) return "longRunning." + error; } + if (message.autoPopulatedFields != null && message.hasOwnProperty("autoPopulatedFields")) { + if (!Array.isArray(message.autoPopulatedFields)) + return "autoPopulatedFields: array expected"; + for (var i = 0; i < message.autoPopulatedFields.length; ++i) + if (!$util.isString(message.autoPopulatedFields[i])) + return "autoPopulatedFields: string[] expected"; + } return null; }; @@ -34896,6 +35725,13 @@ throw TypeError(".google.api.MethodSettings.longRunning: object expected"); message.longRunning = $root.google.api.MethodSettings.LongRunning.fromObject(object.longRunning); } + if (object.autoPopulatedFields) { + if (!Array.isArray(object.autoPopulatedFields)) + throw TypeError(".google.api.MethodSettings.autoPopulatedFields: array expected"); + message.autoPopulatedFields = []; + for (var i = 0; i < object.autoPopulatedFields.length; ++i) + message.autoPopulatedFields[i] = String(object.autoPopulatedFields[i]); + } return message; }; @@ -34912,6 +35748,8 @@ if (!options) 
options = {}; var object = {}; + if (options.arrays || options.defaults) + object.autoPopulatedFields = []; if (options.defaults) { object.selector = ""; object.longRunning = null; @@ -34920,6 +35758,11 @@ object.selector = message.selector; if (message.longRunning != null && message.hasOwnProperty("longRunning")) object.longRunning = $root.google.api.MethodSettings.LongRunning.toObject(message.longRunning, options); + if (message.autoPopulatedFields && message.autoPopulatedFields.length) { + object.autoPopulatedFields = []; + for (var j = 0; j < message.autoPopulatedFields.length; ++j) + object.autoPopulatedFields[j] = message.autoPopulatedFields[j]; + } return object; }; @@ -35320,6 +36163,7 @@ * @property {number} IMMUTABLE=5 IMMUTABLE value * @property {number} UNORDERED_LIST=6 UNORDERED_LIST value * @property {number} NON_EMPTY_DEFAULT=7 NON_EMPTY_DEFAULT value + * @property {number} IDENTIFIER=8 IDENTIFIER value */ api.FieldBehavior = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -35331,6 +36175,7 @@ values[valuesById[5] = "IMMUTABLE"] = 5; values[valuesById[6] = "UNORDERED_LIST"] = 6; values[valuesById[7] = "NON_EMPTY_DEFAULT"] = 7; + values[valuesById[8] = "IDENTIFIER"] = 8; return values; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 68dd1adaf1c..839cd526c2f 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1638,6 +1638,21 @@ } } }, + "Edition": { + "values": { + "EDITION_UNKNOWN": 0, + "EDITION_PROTO2": 998, + "EDITION_PROTO3": 999, + "EDITION_2023": 1000, + "EDITION_2024": 1001, + "EDITION_1_TEST_ONLY": 1, + "EDITION_2_TEST_ONLY": 2, + "EDITION_99997_TEST_ONLY": 99997, + "EDITION_99998_TEST_ONLY": 99998, + "EDITION_99999_TEST_ONLY": 99999, + "EDITION_MAX": 2147483647 + } + }, "FileDescriptorProto": { "fields": { "name": { @@ -1702,8 +1717,8 @@ "id": 12 }, "edition": { - "type": 
"string", - "id": 13 + "type": "Edition", + "id": 14 } } }, @@ -1812,7 +1827,8 @@ "type": "VerificationState", "id": 3, "options": { - "default": "UNVERIFIED" + "default": "UNVERIFIED", + "retention": "RETENTION_SOURCE" } } }, @@ -1934,8 +1950,8 @@ "Label": { "values": { "LABEL_OPTIONAL": 1, - "LABEL_REQUIRED": 2, - "LABEL_REPEATED": 3 + "LABEL_REPEATED": 3, + "LABEL_REQUIRED": 2 } } } @@ -2123,13 +2139,6 @@ "default": false } }, - "phpGenericServices": { - "type": "bool", - "id": 42, - "options": { - "default": false - } - }, "deprecated": { "type": "bool", "id": 23, @@ -2189,6 +2198,10 @@ ] ], "reserved": [ + [ + 42, + 42 + ], [ 38, 38 @@ -2414,8 +2427,8 @@ "EditionDefault": { "fields": { "edition": { - "type": "string", - "id": 1 + "type": "Edition", + "id": 3 }, "value": { "type": "string", @@ -2645,7 +2658,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "2023", + "edition_defaults.edition": "EDITION_2023", "edition_defaults.value": "EXPLICIT" } }, @@ -2655,7 +2668,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "2023", + "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "OPEN" } }, @@ -2665,18 +2678,18 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "2023", + "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "PACKED" } }, - "stringFieldValidation": { - "type": "StringFieldValidation", + "utf8Validation": { + "type": "Utf8Validation", "id": 4, "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "2023", - "edition_defaults.value": "MANDATORY" + "edition_defaults.edition": "EDITION_PROTO3", + "edition_defaults.value": "VERIFY" } }, "messageEncoding": { @@ -2685,7 +2698,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - 
"edition_defaults.edition": "2023", + "edition_defaults.edition": "EDITION_PROTO2", "edition_defaults.value": "LENGTH_PREFIXED" } }, @@ -2695,16 +2708,9 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "2023", + "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "ALLOW" } - }, - "rawFeatures": { - "type": "FeatureSet", - "id": 999, - "options": { - "targets": "TARGET_TYPE_UNKNOWN" - } } }, "extensions": [ @@ -2721,6 +2727,12 @@ 9999 ] ], + "reserved": [ + [ + 999, + 999 + ] + ], "nested": { "FieldPresence": { "values": { @@ -2744,11 +2756,10 @@ "EXPANDED": 2 } }, - "StringFieldValidation": { + "Utf8Validation": { "values": { - "STRING_FIELD_VALIDATION_UNKNOWN": 0, - "MANDATORY": 1, - "HINT": 2, + "UTF8_VALIDATION_UNKNOWN": 0, + "VERIFY": 2, "NONE": 3 } }, @@ -2768,6 +2779,37 @@ } } }, + "FeatureSetDefaults": { + "fields": { + "defaults": { + "rule": "repeated", + "type": "FeatureSetEditionDefault", + "id": 1 + }, + "minimumEdition": { + "type": "Edition", + "id": 4 + }, + "maximumEdition": { + "type": "Edition", + "id": 5 + } + }, + "nested": { + "FeatureSetEditionDefault": { + "fields": { + "edition": { + "type": "Edition", + "id": 3 + }, + "features": { + "type": "FeatureSet", + "id": 2 + } + } + } + } + }, "SourceCodeInfo": { "fields": { "location": { @@ -3295,6 +3337,11 @@ "longRunning": { "type": "LongRunning", "id": 2 + }, + "autoPopulatedFields": { + "rule": "repeated", + "type": "string", + "id": 3 } }, "nested": { @@ -3366,7 +3413,8 @@ "INPUT_ONLY": 4, "IMMUTABLE": 5, "UNORDERED_LIST": 6, - "NON_EMPTY_DEFAULT": 7 + "NON_EMPTY_DEFAULT": 7, + "IDENTIFIER": 8 } }, "resourceReference": { diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 85008b70dcf..8733d044438 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -49,3 +49,12 @@ import * as protos from '../protos/protos'; 
export {protos}; import * as adapt from './adapt'; export {adapt}; + +// Add extra protobufjs definitions. +// When importing protobufjs/ext/descriptor package, it monkey patches some methods +// that we use in this package. We need to manually declare some of those +// methods that we use to make the Typescript compiler happy. +// There are some open issues around that. After they are fixed, we can remove this: +// * https://github.com/protobufjs/protobuf.js/issues/1499 +// * https://github.com/protobufjs/protobuf.js/issues/1149 +import './protobuf'; diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts index 8389860cd98..0afe5755434 100644 --- a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -import {protobuf} from 'google-gax'; +import * as protobuf from 'protobufjs'; import * as protos from '../../protos/protos'; import {PendingWrite} from './pending_write'; import {StreamConnection, RemoveListener} from './stream_connection'; @@ -90,8 +90,7 @@ export class JSONWriter { const normalized = adapt.normalizeDescriptor( new DescriptorProto(protoDescriptor) ); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this._type = (Type as any).fromDescriptor(normalized); + this._type = Type.fromDescriptor(normalized); this._writer.setProtoDescriptor(protoDescriptor); } diff --git a/handwritten/bigquery-storage/src/protobuf/index.ts b/handwritten/bigquery-storage/src/protobuf/index.ts new file mode 100644 index 00000000000..1091c343834 --- /dev/null +++ b/handwritten/bigquery-storage/src/protobuf/index.ts @@ -0,0 +1,27 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance 
with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import 'protobufjs'; +import 'protobufjs/ext/descriptor'; +import * as protos from '../../protos/protos'; + +type IDescriptorProto = protos.google.protobuf.IDescriptorProto; + +declare module 'protobufjs' { + // eslint-disable-next-line @typescript-eslint/no-namespace + namespace Type { + let toDescriptor: (protoVersion: string) => IDescriptorProto; + let fromDescriptor: (descriptor: IDescriptorProto) => Type; + } +} diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 8ea0a09123a..c795ef4dab4 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -19,7 +19,8 @@ import * as gax from 'google-gax'; import {BigQuery, TableSchema} from '@google-cloud/bigquery'; import * as protos from '../protos/protos'; import * as bigquerywriter from '../src'; -import {ClientOptions, protobuf} from 'google-gax'; +import * as protobuf from 'protobufjs'; +import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; const {managedwriter, adapt} = bigquerywriter; @@ -655,8 +656,8 @@ describe('managedwriter.WriterClient', () => { }); protoDescriptor.field = protoDescriptor.field?.slice(0, 1); // leave just first field - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const invalidProto = (Type as any).fromDescriptor( + + const invalidProto = Type.fromDescriptor( 
protoDescriptor ) as protobuf.Type; const row = { diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 99fea44edd2..d350e440f1b 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -14,7 +14,7 @@ import * as assert from 'assert'; import {describe, it} from 'mocha'; -import {protobuf} from 'google-gax'; +import * as protobuf from 'protobufjs'; import * as adapt from '../../src/adapt'; import * as messagesJSON from '../../samples/testdata/messages.json'; import * as protos from '../../protos/protos'; @@ -59,7 +59,7 @@ describe('Adapt Protos', () => { if (!protoDescriptor) { throw Error('null proto descriptor set'); } - const TestProto = (Type as any).fromDescriptor(protoDescriptor); + const TestProto = Type.fromDescriptor(protoDescriptor); const raw = { foo: 'name', bar: 42, @@ -191,7 +191,7 @@ describe('Adapt Protos', () => { }, ], }); - const NestedProto = (Type as any).fromDescriptor(protoDescriptor); + const NestedProto = Type.fromDescriptor(protoDescriptor); const raw = { record_id: '12345', recordDetails: [ From 3c5ead9b56714597d8fe82fe6cbbba87cca3e7ea Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 22 Jan 2024 16:04:59 -0400 Subject: [PATCH 246/333] test: fix test for large insert (#405) --- .../system-test/managed_writer_client_test.ts | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index c795ef4dab4..7b7dfa0b775 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -760,17 +760,10 @@ describe('managedwriter.WriterClient', () => { }, 0 ); - let foundErr: Error | null = null; - try { - await badPw.getResult(); - } catch (err) { - 
foundErr = err as Error; - } - assert.notEqual(foundErr, null); - assert.equal( - foundErr?.message.includes('contains an invalid argument.'), - true - ); + + let res = await badPw.getResult(); + assert.notEqual(res.error, null); + assert.equal(res.error?.message?.includes('request too large'), true); const goodPw = writer.appendRows( { @@ -778,7 +771,7 @@ describe('managedwriter.WriterClient', () => { }, 0 ); - const res = await goodPw.getResult(); + res = await goodPw.getResult(); assert.equal(res.appendResult?.offset?.value, '0'); writer.close(); From 069da98cccde2bd102ac8de99ec181e11eb2e1f5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 22 Jan 2024 21:10:15 +0100 Subject: [PATCH 247/333] chore(deps): update dependency gapic-tools to ^0.3.0 (#406) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [gapic-tools](https://togithub.com/googleapis/gax-nodejs) ([source](https://togithub.com/googleapis/gax-nodejs/tree/HEAD/gapic-tools)) | [`^0.2.0` -> `^0.3.0`](https://renovatebot.com/diffs/npm/gapic-tools/0.2.0/0.3.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/gapic-tools/0.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/npm/gapic-tools/0.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/npm/gapic-tools/0.2.0/0.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/gapic-tools/0.2.0/0.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/gax-nodejs (gapic-tools) ### [`v0.3.0`](https://togithub.com/googleapis/gax-nodejs/releases/tag/gapic-tools-v0.3.0): gapic-tools: v0.3.0 [Compare Source](https://togithub.com/googleapis/gax-nodejs/compare/gapic-tools-v0.2.0...gapic-tools-v0.3.0)
0.3.0 #### [0.3.0](https://togithub.com/googleapis/gax-nodejs/compare/v0.2.0...v0.3.0) (2024-01-16) ##### Features - add ESM tools in gax ([#​1459](https://togithub.com/googleapis/gax-nodejs/issues/1459)) ([0fb1cf9](https://togithub.com/googleapis/gax-nodejs/commit/0fb1cf9acd32dc1ae03a33279eca9449a7d3fca7)) ##### Bug Fixes - **deps:** update dependency google-proto-files to v4 ([#​1490](https://togithub.com/googleapis/gax-nodejs/issues/1490)) ([4748c9f](https://togithub.com/googleapis/gax-nodejs/commit/4748c9fc3a8cfe31e5abb3e35a6ee0d9a6f0e560)) - **deps:** update dependency protobufjs-cli to v1.1.2 ([#​1495](https://togithub.com/googleapis/gax-nodejs/issues/1495)) ([762591e](https://togithub.com/googleapis/gax-nodejs/commit/762591ed28801e5311ab737b04185781a41752e6)) - make gapic-tools depend on gax-nodejs ([#​1480](https://togithub.com/googleapis/gax-nodejs/issues/1480)) ([d0f410d](https://togithub.com/googleapis/gax-nodejs/commit/d0f410d2e08f393f2661c8c92568a0b518fddf99)) - release new version of gapic-tools ([#​1483](https://togithub.com/googleapis/gax-nodejs/issues/1483)) ([e4f5482](https://togithub.com/googleapis/gax-nodejs/commit/e4f548254bfce3daa3b02ae81764bb3394fc4f23)) - update google-gax and google-proto-files ([#​1533](https://togithub.com/googleapis/gax-nodejs/issues/1533)) ([4897bc6](https://togithub.com/googleapis/gax-nodejs/commit/4897bc60db5e22427e533805d528f434e0cc8e40))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://developer.mend.io/github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a6c20149ef6..ed12abd165e 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -39,7 +39,7 @@ "@types/node": "^18.0.0", "@types/sinon": "^10.0.0", "c8": "^8.0.0", - "gapic-tools": "^0.2.0", + "gapic-tools": "^0.3.0", "gts": "^5.0.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^3.0.0", From f415dd7a9439ec84cb6a5fd71968330017ca6977 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 24 Jan 2024 18:33:21 +0100 Subject: [PATCH 248/333] chore(deps): update dependency @types/node to v20 (#392) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index ed12abd165e..90d1a2da981 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -36,7 +36,7 @@ "@google-cloud/bigquery": "^7.0.0", "@types/uuid": "^9.0.1", "@types/mocha": "^9.0.0", - "@types/node": "^18.0.0", + "@types/node": "^20.0.0", "@types/sinon": "^10.0.0", "c8": "^8.0.0", "gapic-tools": "^0.3.0", From 
f5f72e19588a91ed5edb404e380056f485560495 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 6 Feb 2024 08:17:35 +0100 Subject: [PATCH 249/333] chore(deps): update dependency @types/sinon to v17 (#397) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 90d1a2da981..374d388d7a4 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -37,7 +37,7 @@ "@types/uuid": "^9.0.1", "@types/mocha": "^9.0.0", "@types/node": "^20.0.0", - "@types/sinon": "^10.0.0", + "@types/sinon": "^17.0.0", "c8": "^8.0.0", "gapic-tools": "^0.3.0", "gts": "^5.0.0", From af2ab685479b33b86b717d1100f56264c3c3216e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 6 Feb 2024 08:33:37 +0100 Subject: [PATCH 250/333] chore(deps): update dependency c8 to v9 (#404) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 374d388d7a4..de8f1b26c87 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -38,7 +38,7 @@ "@types/mocha": "^9.0.0", "@types/node": "^20.0.0", "@types/sinon": "^17.0.0", - "c8": "^8.0.0", + "c8": "^9.0.0", "gapic-tools": "^0.3.0", "gts": "^5.0.0", "jsdoc": "^4.0.0", From 2343196ae42342826f5bd5dc33c1fb25ebc93c6d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 13:34:06 -0400 Subject: [PATCH 251/333] chore(main): release 4.2.1 (#408) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.2.1 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- ...snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 13 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index cce71a70342..861ea70132b 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [4.2.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.2.0...v4.2.1) (2024-02-06) + + +### Bug Fixes + +* Force import protobufjs/ext/descriptor package ([#407](https://github.com/googleapis/nodejs-bigquery-storage/issues/407)) ([a970824](https://github.com/googleapis/nodejs-bigquery-storage/commit/a970824d07f7a5faebe1a7972f6c19f80d2fe38c)) +* Large inserts AppendRow requests ([432835c](https://github.com/googleapis/nodejs-bigquery-storage/commit/432835cf9edc006df100de9a29697221dcc05247)) + ## [4.2.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.1.0...v4.2.0) (2023-09-26) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index de8f1b26c87..e259e1da0ea 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.2.0", + "version": "4.2.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git 
a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index ed8e3dc17b5..c64e3155b51 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.0", + "version": "4.2.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index e538f1b195b..de3c2525992 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.0", + "version": "4.2.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 80fbbfc0e61..c8144a014a7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.0", + "version": "4.2.1", "language": "TYPESCRIPT", "apis": [ { diff --git 
a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 80fbbfc0e61..c8144a014a7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.0", + "version": "4.2.1", "language": "TYPESCRIPT", "apis": [ { From 83e5125aff3bafce2441829d80e8a4afc8fca711 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 11:52:48 -0400 Subject: [PATCH 252/333] build: update gapic-generator-typescript to v4.4.1 (#399) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build: update gapic-generator-typescript to v4.4.1 PiperOrigin-RevId: 604765466 Source-Link: https://github.com/googleapis/googleapis/commit/40203ca1880849480bbff7b8715491060bbccdf1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/07b7f3dad8aa1912d4acdcfd6365bb4236e4b54b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDdiN2YzZGFkOGFhMTkxMmQ0YWNkY2ZkNjM2NWJiNDIzNmU0YjU0YiJ9 feat: Trusted Private Cloud support, use the universeDomain parameter feat: auto populate UUID fields where needed fix: revert changes to streaming retries Use gapic-generator-typescript v4.4.0. 
PiperOrigin-RevId: 603757799 Source-Link: https://github.com/googleapis/googleapis/commit/1a45bf7393b52407188c82e63101db7dc9c72026 Source-Link: https://github.com/googleapis/googleapis-gen/commit/19ca4b45a53d00cb7bdd94b442b60bd237dfe123 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTljYTRiNDVhNTNkMDBjYjdiZGQ5NGI0NDJiNjBiZDIzN2RmZTEyMyJ9 fix: improve retry logic for streaming API calls build: update typescript generator version to 4.3.0 The streaming API call retry logic has changed, which in some rare cases may require code changes. Please feel free to reach out to us in the issues if you experience new problems with retrying streaming calls after this update. PiperOrigin-RevId: 599622271 Source-Link: https://github.com/googleapis/googleapis/commit/6239c217f083277d7a43c8bee55969654c3b2fee Source-Link: https://github.com/googleapis/googleapis-gen/commit/da13d8222d3ba33734501999864458640f1405ae Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGExM2Q4MjIyZDNiYTMzNzM0NTAxOTk5ODY0NDU4NjQwZjE0MDVhZSJ9 feat: add ability to request compressed ReadRowsResponse rows This change allows the client to request raw lz4 compression of the ReadRowsResponse rows data for both ArrowRecordBatches and Avro rows. 
PiperOrigin-RevId: 597000088 Source-Link: https://github.com/googleapis/googleapis/commit/341d70f9f3ac6c042309d9bc3c52edc94d95b5fb Source-Link: https://github.com/googleapis/googleapis-gen/commit/01713f3f5534acc78f04d59e13c0668c8129bf03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE3MTNmM2Y1NTM0YWNjNzhmMDRkNTllMTNjMDY2OGM4MTI5YmYwMyJ9 fix: correct long audio synthesis HTTP binding docs: Deprecate the custom voice usage field PiperOrigin-RevId: 595119987 Source-Link: https://github.com/googleapis/googleapis/commit/c22f4081fe394091ff2bb35b39b604ebb0e903cb Source-Link: https://github.com/googleapis/googleapis-gen/commit/4e9ca63d2cc7933eb7c383ce8b794fce152ea2fc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGU5Y2E2M2QyY2M3OTMzZWI3YzM4M2NlOGI3OTRmY2UxNTJlYTJmYyJ9 build: update Node.js generator to compile protos PiperOrigin-RevId: 582493526 Source-Link: https://github.com/googleapis/googleapis/commit/7c4e4b52369c9f6ac3e78f945d36fc833f2280de Source-Link: https://github.com/googleapis/googleapis-gen/commit/368cfb651016d6a93ca6e488cbc34e2d1d9d212c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzY4Y2ZiNjUxMDE2ZDZhOTNjYTZlNDg4Y2JjMzRlMmQxZDlkMjEyYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.jsdoc.js | 4 +- .../cloud/bigquery/storage/v1/storage.proto | 17 +++ .../cloud/bigquery/storage/v1/stream.proto | 20 +++ .../bigquery-storage/protos/protos.d.ts | 27 ++++ handwritten/bigquery-storage/protos/protos.js | 119 ++++++++++++++++++ .../bigquery-storage/protos/protos.json | 34 +++++ .../v1/big_query_read.create_read_session.js | 2 +- .../generated/v1/big_query_read.read_rows.js | 2 +- .../v1/big_query_read.split_read_stream.js | 2 +- .../v1/big_query_write.append_rows.js | 4 +- ..._query_write.batch_commit_write_streams.js | 2 +- .../v1/big_query_write.create_write_stream.js | 2 +- 
.../big_query_write.finalize_write_stream.js | 2 +- .../v1/big_query_write.flush_rows.js | 2 +- .../v1/big_query_write.get_write_stream.js | 2 +- ...orage.batch_create_read_session_streams.js | 2 +- .../big_query_storage.create_read_session.js | 2 +- .../big_query_storage.finalize_stream.js | 2 +- .../v1beta1/big_query_storage.read_rows.js | 2 +- .../big_query_storage.split_read_stream.js | 2 +- .../src/v1/big_query_read_client.ts | 63 ++++++++-- .../src/v1/big_query_write_client.ts | 63 ++++++++-- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../src/v1beta1/big_query_storage_client.ts | 63 ++++++++-- .../bigquery-storage/src/v1beta1/index.ts | 2 +- .../system-test/fixtures/sample/src/index.js | 3 +- .../system-test/fixtures/sample/src/index.ts | 7 +- .../bigquery-storage/system-test/install.ts | 2 +- .../test/gapic_big_query_read_v1.ts | 62 +++++++-- .../test/gapic_big_query_storage_v1beta1.ts | 64 ++++++++-- .../test/gapic_big_query_write_v1.ts | 64 ++++++++-- 31 files changed, 571 insertions(+), 75 deletions(-) diff --git a/handwritten/bigquery-storage/.jsdoc.js b/handwritten/bigquery-storage/.jsdoc.js index dcf0053ac8d..e5beb93de77 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2023 Google LLC', + copyright: 'Copyright 2024 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 187bf549dac..97eb35c214d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -348,6 +348,23 @@ message ReadRowsResponse { // Output only. Arrow schema. ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; } + + // Optional. If the row data in this ReadRowsResponse is compressed, then + // uncompressed byte size is the original size of the uncompressed row data. + // If it is set to a value greater than 0, then decompress into a buffer of + // size uncompressed_byte_size using the compression codec that was requested + // during session creation time and which is specified in + // TableReadOptions.response_compression_codec in ReadSession. + // This value is not set if no response_compression_codec was not requested + // and it is -1 if the requested compression would not have reduced the size + // of this ReadRowsResponse's row data. This attempts to match Apache Arrow's + // behavior described here https://github.com/apache/arrow/issues/15102 where + // the uncompressed length may be set to -1 to indicate that the data that + // follows is not compressed, which can be useful for cases where compression + // does not yield appreciable savings. When uncompressed_byte_size is not + // greater than 0, the client should skip decompression. + optional int64 uncompressed_byte_size = 9 + [(google.api.field_behavior) = OPTIONAL]; } // Request message for `SplitReadStream`. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 785c74f788d..c75c637cf3f 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -59,6 +59,21 @@ message ReadSession { // Options dictating how we read a table. message TableReadOptions { + // Specifies which compression codec to attempt on the entire serialized + // response payload (either Arrow record batch or Avro rows). This is + // not to be confused with the Apache Arrow native compression codecs + // specified in ArrowSerializationOptions. For performance reasons, when + // creating a read session requesting Arrow responses, setting both native + // Arrow compression and application-level response compression will not be + // allowed - choose, at most, one kind of compression. + enum ResponseCompressionCodec { + // Default is no compression. + RESPONSE_COMPRESSION_CODEC_UNSPECIFIED = 0; + + // Use raw LZ4 compression. + RESPONSE_COMPRESSION_CODEC_LZ4 = 2; + } + // Optional. The names of the fields in the table to be returned. If no // field names are specified, then all fields in the table are returned. // @@ -138,6 +153,11 @@ message ReadSession { // https://cloud.google.com/bigquery/docs/table-sampling) optional double sample_percentage = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Set response_compression_codec when creating a read session to + // enable application-level compression of ReadRows responses. + optional ResponseCompressionCodec response_compression_codec = 6 + [(google.api.field_behavior) = OPTIONAL]; } // Output only. 
Unique identifier for the session, in the form diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index ba25aecc488..0bd59bff934 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1605,6 +1605,9 @@ export namespace google { /** ReadRowsResponse arrowSchema */ arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ReadRowsResponse uncompressedByteSize */ + uncompressedByteSize?: (number|Long|string|null); } /** Represents a ReadRowsResponse. */ @@ -1637,12 +1640,18 @@ export namespace google { /** ReadRowsResponse arrowSchema. */ public arrowSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + /** ReadRowsResponse uncompressedByteSize. */ + public uncompressedByteSize?: (number|Long|string|null); + /** ReadRowsResponse rows. */ public rows?: ("avroRows"|"arrowRecordBatch"); /** ReadRowsResponse schema. */ public schema?: ("avroSchema"|"arrowSchema"); + /** ReadRowsResponse _uncompressedByteSize. */ + public _uncompressedByteSize?: "uncompressedByteSize"; + /** * Creates a new ReadRowsResponse instance using the specified properties. * @param [properties] Properties to set @@ -3744,6 +3753,9 @@ export namespace google { /** TableReadOptions samplePercentage */ samplePercentage?: (number|null); + + /** TableReadOptions responseCompressionCodec */ + responseCompressionCodec?: (google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|null); } /** Represents a TableReadOptions. */ @@ -3770,12 +3782,18 @@ export namespace google { /** TableReadOptions samplePercentage. */ public samplePercentage?: (number|null); + /** TableReadOptions responseCompressionCodec. 
*/ + public responseCompressionCodec?: (google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|null); + /** TableReadOptions outputFormatSerializationOptions. */ public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); /** TableReadOptions _samplePercentage. */ public _samplePercentage?: "samplePercentage"; + /** TableReadOptions _responseCompressionCodec. */ + public _responseCompressionCodec?: "responseCompressionCodec"; + /** * Creates a new TableReadOptions instance using the specified properties. * @param [properties] Properties to set @@ -3853,6 +3871,15 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + namespace TableReadOptions { + + /** ResponseCompressionCodec enum. */ + enum ResponseCompressionCodec { + RESPONSE_COMPRESSION_CODEC_UNSPECIFIED = 0, + RESPONSE_COMPRESSION_CODEC_LZ4 = 2 + } + } } /** Properties of a ReadStream. */ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index a42af655309..6cf0ac8b67a 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -3403,6 +3403,7 @@ * @property {google.cloud.bigquery.storage.v1.IThrottleState|null} [throttleState] ReadRowsResponse throttleState * @property {google.cloud.bigquery.storage.v1.IAvroSchema|null} [avroSchema] ReadRowsResponse avroSchema * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [arrowSchema] ReadRowsResponse arrowSchema + * @property {number|Long|null} [uncompressedByteSize] ReadRowsResponse uncompressedByteSize */ /** @@ -3476,6 +3477,14 @@ */ ReadRowsResponse.prototype.arrowSchema = null; + /** + * ReadRowsResponse uncompressedByteSize. 
+ * @member {number|Long|null|undefined} uncompressedByteSize + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + ReadRowsResponse.prototype.uncompressedByteSize = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -3501,6 +3510,17 @@ set: $util.oneOfSetter($oneOfFields) }); + /** + * ReadRowsResponse _uncompressedByteSize. + * @member {"uncompressedByteSize"|undefined} _uncompressedByteSize + * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse + * @instance + */ + Object.defineProperty(ReadRowsResponse.prototype, "_uncompressedByteSize", { + get: $util.oneOfGetter($oneOfFields = ["uncompressedByteSize"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new ReadRowsResponse instance using the specified properties. * @function create @@ -3539,6 +3559,8 @@ $root.google.cloud.bigquery.storage.v1.AvroSchema.encode(message.avroSchema, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); if (message.arrowSchema != null && Object.hasOwnProperty.call(message, "arrowSchema")) $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.arrowSchema, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.uncompressedByteSize != null && Object.hasOwnProperty.call(message, "uncompressedByteSize")) + writer.uint32(/* id 9, wireType 0 =*/72).int64(message.uncompressedByteSize); return writer; }; @@ -3601,6 +3623,10 @@ message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); break; } + case 9: { + message.uncompressedByteSize = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -3686,6 +3712,11 @@ return "arrowSchema." 
+ error; } } + if (message.uncompressedByteSize != null && message.hasOwnProperty("uncompressedByteSize")) { + properties._uncompressedByteSize = 1; + if (!$util.isInteger(message.uncompressedByteSize) && !(message.uncompressedByteSize && $util.isInteger(message.uncompressedByteSize.low) && $util.isInteger(message.uncompressedByteSize.high))) + return "uncompressedByteSize: integer|Long expected"; + } return null; }; @@ -3740,6 +3771,15 @@ throw TypeError(".google.cloud.bigquery.storage.v1.ReadRowsResponse.arrowSchema: object expected"); message.arrowSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.arrowSchema); } + if (object.uncompressedByteSize != null) + if ($util.Long) + (message.uncompressedByteSize = $util.Long.fromValue(object.uncompressedByteSize)).unsigned = false; + else if (typeof object.uncompressedByteSize === "string") + message.uncompressedByteSize = parseInt(object.uncompressedByteSize, 10); + else if (typeof object.uncompressedByteSize === "number") + message.uncompressedByteSize = object.uncompressedByteSize; + else if (typeof object.uncompressedByteSize === "object") + message.uncompressedByteSize = new $util.LongBits(object.uncompressedByteSize.low >>> 0, object.uncompressedByteSize.high >>> 0).toNumber(); return message; }; @@ -3794,6 +3834,14 @@ if (options.oneofs) object.schema = "arrowSchema"; } + if (message.uncompressedByteSize != null && message.hasOwnProperty("uncompressedByteSize")) { + if (typeof message.uncompressedByteSize === "number") + object.uncompressedByteSize = options.longs === String ? String(message.uncompressedByteSize) : message.uncompressedByteSize; + else + object.uncompressedByteSize = options.longs === String ? $util.Long.prototype.toString.call(message.uncompressedByteSize) : options.longs === Number ? 
new $util.LongBits(message.uncompressedByteSize.low >>> 0, message.uncompressedByteSize.high >>> 0).toNumber() : message.uncompressedByteSize; + if (options.oneofs) + object._uncompressedByteSize = "uncompressedByteSize"; + } return object; }; @@ -8916,6 +8964,7 @@ * @property {google.cloud.bigquery.storage.v1.IArrowSerializationOptions|null} [arrowSerializationOptions] TableReadOptions arrowSerializationOptions * @property {google.cloud.bigquery.storage.v1.IAvroSerializationOptions|null} [avroSerializationOptions] TableReadOptions avroSerializationOptions * @property {number|null} [samplePercentage] TableReadOptions samplePercentage + * @property {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|null} [responseCompressionCodec] TableReadOptions responseCompressionCodec */ /** @@ -8974,6 +9023,14 @@ */ TableReadOptions.prototype.samplePercentage = null; + /** + * TableReadOptions responseCompressionCodec. + * @member {google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec|null|undefined} responseCompressionCodec + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + TableReadOptions.prototype.responseCompressionCodec = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -8999,6 +9056,17 @@ set: $util.oneOfSetter($oneOfFields) }); + /** + * TableReadOptions _responseCompressionCodec. + * @member {"responseCompressionCodec"|undefined} _responseCompressionCodec + * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions + * @instance + */ + Object.defineProperty(TableReadOptions.prototype, "_responseCompressionCodec", { + get: $util.oneOfGetter($oneOfFields = ["responseCompressionCodec"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new TableReadOptions instance using the specified properties. 
* @function create @@ -9034,6 +9102,8 @@ $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.encode(message.avroSerializationOptions, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.samplePercentage != null && Object.hasOwnProperty.call(message, "samplePercentage")) writer.uint32(/* id 5, wireType 1 =*/41).double(message.samplePercentage); + if (message.responseCompressionCodec != null && Object.hasOwnProperty.call(message, "responseCompressionCodec")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.responseCompressionCodec); return writer; }; @@ -9090,6 +9160,10 @@ message.samplePercentage = reader.double(); break; } + case 6: { + message.responseCompressionCodec = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -9159,6 +9233,16 @@ if (typeof message.samplePercentage !== "number") return "samplePercentage: number expected"; } + if (message.responseCompressionCodec != null && message.hasOwnProperty("responseCompressionCodec")) { + properties._responseCompressionCodec = 1; + switch (message.responseCompressionCodec) { + default: + return "responseCompressionCodec: enum value expected"; + case 0: + case 2: + break; + } + } return null; }; @@ -9195,6 +9279,22 @@ } if (object.samplePercentage != null) message.samplePercentage = Number(object.samplePercentage); + switch (object.responseCompressionCodec) { + default: + if (typeof object.responseCompressionCodec === "number") { + message.responseCompressionCodec = object.responseCompressionCodec; + break; + } + break; + case "RESPONSE_COMPRESSION_CODEC_UNSPECIFIED": + case 0: + message.responseCompressionCodec = 0; + break; + case "RESPONSE_COMPRESSION_CODEC_LZ4": + case 2: + message.responseCompressionCodec = 2; + break; + } return message; }; @@ -9237,6 +9337,11 @@ if (options.oneofs) object._samplePercentage = "samplePercentage"; } + if (message.responseCompressionCodec != null && message.hasOwnProperty("responseCompressionCodec")) { + 
object.responseCompressionCodec = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec[message.responseCompressionCodec] === undefined ? message.responseCompressionCodec : $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec[message.responseCompressionCodec] : message.responseCompressionCodec; + if (options.oneofs) + object._responseCompressionCodec = "responseCompressionCodec"; + } return object; }; @@ -9266,6 +9371,20 @@ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"; }; + /** + * ResponseCompressionCodec enum. + * @name google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodec + * @enum {number} + * @property {number} RESPONSE_COMPRESSION_CODEC_UNSPECIFIED=0 RESPONSE_COMPRESSION_CODEC_UNSPECIFIED value + * @property {number} RESPONSE_COMPRESSION_CODEC_LZ4=2 RESPONSE_COMPRESSION_CODEC_LZ4 value + */ + TableReadOptions.ResponseCompressionCodec = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "RESPONSE_COMPRESSION_CODEC_UNSPECIFIED"] = 0; + values[valuesById[2] = "RESPONSE_COMPRESSION_CODEC_LZ4"] = 2; + return values; + })(); + return TableReadOptions; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 839cd526c2f..462a8b51cd9 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -396,6 +396,11 @@ "avroSchema", "arrowSchema" ] + }, + "_uncompressedByteSize": { + "oneof": [ + "uncompressedByteSize" + ] } }, "fields": { @@ -432,6 +437,14 @@ "options": { "(google.api.field_behavior)": "OUTPUT_ONLY" } + }, + "uncompressedByteSize": { + "type": "int64", + "id": 9, + "options": { + "(google.api.field_behavior)": "OPTIONAL", + "proto3_optional": true + } } } }, @@ -879,6 +892,11 @@ "oneof": [ "samplePercentage" ] + 
}, + "_responseCompressionCodec": { + "oneof": [ + "responseCompressionCodec" + ] } }, "fields": { @@ -912,6 +930,22 @@ "(google.api.field_behavior)": "OPTIONAL", "proto3_optional": true } + }, + "responseCompressionCodec": { + "type": "ResponseCompressionCodec", + "id": 6, + "options": { + "(google.api.field_behavior)": "OPTIONAL", + "proto3_optional": true + } + } + }, + "nested": { + "ResponseCompressionCodec": { + "values": { + "RESPONSE_COMPRESSION_CODEC_UNSPECIFIED": 0, + "RESPONSE_COMPRESSION_CODEC_LZ4": 2 + } } } } diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index 829e5ce9cc3..cf2b960fb0c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index a38ba5618fe..6489af416f6 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index e3522593f09..a5679f5f9ca 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 30c28eabeeb..1f1cb4d9476 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -114,7 +114,7 @@ function main(writeStream) { stream.on('error', (err) => { throw(err) }); stream.on('end', () => { /* API call completed */ }); stream.write(request); - stream.end(); + stream.end(); } callAppendRows(); diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index fe982cecb2e..da96822c57c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index 2eeb3f4a1e7..d40cd34d21d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index f9766ec5286..c112283cd7c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index c9dd16b7f9a..4eb328d0608 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 83e7385822a..23813d55847 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index 015a92acd58..511f3008909 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index b1dafbef295..03c79337d1b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index 41a6abd1926..c5d36550b95 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index 2c95aad2222..fadb7b1ea49 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index dfaef414098..2e8ccd036c4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index e0dbfcad645..467a9523bb4 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); + /** * Client JSON configuration object, loaded from * `src/v1/big_query_read_client_config.json`. @@ -50,6 +51,8 @@ export class BigQueryReadClient { private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; + private _universeDomain: string; + private _servicePath: string; auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -107,8 +110,20 @@ export class BigQueryReadClient { ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryReadClient; + if ( + opts?.universe_domain && + opts?.universeDomain && + opts?.universe_domain !== opts?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.' + ); + } + this._universeDomain = + opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + this._servicePath = 'bigquerystorage.' 
+ this._universeDomain; const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + opts?.servicePath || opts?.apiEndpoint || this._servicePath; this._providedCustomServicePath = !!( opts?.servicePath || opts?.apiEndpoint ); @@ -120,7 +135,7 @@ export class BigQueryReadClient { opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + if (servicePath !== this._servicePath && !('scopes' in opts)) { opts['scopes'] = staticMembers.scopes; } @@ -145,10 +160,10 @@ export class BigQueryReadClient { this.auth.useJWTAccessWithScope = true; // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; + this.auth.defaultServicePath = this._servicePath; // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { + if (servicePath === this._servicePath) { this.auth.defaultScopes = staticMembers.scopes; } @@ -196,7 +211,8 @@ export class BigQueryReadClient { this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, - !!opts.fallback + !!opts.fallback, + /* gaxStreamingRetries: */ false ), }; @@ -297,21 +313,52 @@ export class BigQueryReadClient { /** * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. * @returns {string} The DNS address for this service. 
*/ static get servicePath() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static servicePath is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. * @returns {string} The DNS address for this service. */ static get apiEndpoint() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static apiEndpoint is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + + get universeDomain() { + return this._universeDomain; + } + /** * The port for this API service. * @returns {number} The default port for this service. diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 0147a17887c..815630029fa 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); + /** * Client JSON configuration object, loaded from * `src/v1/big_query_write_client_config.json`. @@ -53,6 +54,8 @@ export class BigQueryWriteClient { private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; + private _universeDomain: string; + private _servicePath: string; auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -110,8 +113,20 @@ export class BigQueryWriteClient { ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryWriteClient; + if ( + opts?.universe_domain && + opts?.universeDomain && + opts?.universe_domain !== opts?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.' + ); + } + this._universeDomain = + opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + this._servicePath = 'bigquerystorage.' + this._universeDomain; const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + opts?.servicePath || opts?.apiEndpoint || this._servicePath; this._providedCustomServicePath = !!( opts?.servicePath || opts?.apiEndpoint ); @@ -123,7 +138,7 @@ export class BigQueryWriteClient { opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + if (servicePath !== this._servicePath && !('scopes' in opts)) { opts['scopes'] = staticMembers.scopes; } @@ -148,10 +163,10 @@ export class BigQueryWriteClient { this.auth.useJWTAccessWithScope = true; // Set defaultServicePath on the auth object. 
- this.auth.defaultServicePath = staticMembers.servicePath; + this.auth.defaultServicePath = this._servicePath; // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { + if (servicePath === this._servicePath) { this.auth.defaultScopes = staticMembers.scopes; } @@ -199,7 +214,8 @@ export class BigQueryWriteClient { this.descriptors.stream = { appendRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, - !!opts.fallback + !!opts.fallback, + /* gaxStreamingRetries: */ false ), }; @@ -303,21 +319,52 @@ export class BigQueryWriteClient { /** * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. * @returns {string} The DNS address for this service. */ static get servicePath() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static servicePath is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. * @returns {string} The DNS address for this service. */ static get apiEndpoint() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static apiEndpoint is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + + get universeDomain() { + return this._universeDomain; + } + /** * The port for this API service. * @returns {number} The default port for this service. 
diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index 3adaec0e2a4..3fc000e2add 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index afcf322614b..8f175f5384a 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); + /** * Client JSON configuration object, loaded from * `src/v1beta1/big_query_storage_client_config.json`. @@ -55,6 +56,8 @@ export class BigQueryStorageClient { private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; + private _universeDomain: string; + private _servicePath: string; auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -112,8 +115,20 @@ export class BigQueryStorageClient { ) { // Ensure that options include all the required fields. 
const staticMembers = this.constructor as typeof BigQueryStorageClient; + if ( + opts?.universe_domain && + opts?.universeDomain && + opts?.universe_domain !== opts?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.' + ); + } + this._universeDomain = + opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + this._servicePath = 'bigquerystorage.' + this._universeDomain; const servicePath = - opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + opts?.servicePath || opts?.apiEndpoint || this._servicePath; this._providedCustomServicePath = !!( opts?.servicePath || opts?.apiEndpoint ); @@ -125,7 +140,7 @@ export class BigQueryStorageClient { opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + if (servicePath !== this._servicePath && !('scopes' in opts)) { opts['scopes'] = staticMembers.scopes; } @@ -150,10 +165,10 @@ export class BigQueryStorageClient { this.auth.useJWTAccessWithScope = true; // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; + this.auth.defaultServicePath = this._servicePath; // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { + if (servicePath === this._servicePath) { this.auth.defaultScopes = staticMembers.scopes; } @@ -195,7 +210,8 @@ export class BigQueryStorageClient { this.descriptors.stream = { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, - !!opts.fallback + !!opts.fallback, + /* gaxStreamingRetries: */ false ), }; @@ -299,21 +315,52 @@ export class BigQueryStorageClient { /** * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. 
* @returns {string} The DNS address for this service. */ static get servicePath() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static servicePath is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. * @returns {string} The DNS address for this service. */ static get apiEndpoint() { + if ( + typeof process !== undefined && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static apiEndpoint is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } return 'bigquerystorage.googleapis.com'; } + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + + get universeDomain() { + return this._universeDomain; + } + /** * The port for this API service. * @returns {number} The default port for this service. diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index fddf65e1444..709e4bb9416 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index c0874e08c69..6cfddb44e1d 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** + /* eslint-disable node/no-missing-require, no-unused-vars */ const storage = require('@google-cloud/bigquery-storage'); diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 330b1557866..60e17b1986f 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -16,10 +16,7 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. 
** -import { - BigQueryReadClient, - BigQueryWriteClient, -} from '@google-cloud/bigquery-storage'; +import {BigQueryReadClient, BigQueryWriteClient} from '@google-cloud/bigquery-storage'; // check that the client class type name can be used function doStuffWithBigQueryReadClient(client: BigQueryReadClient) { diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index f61fe236476..83b83f332c3 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index c178c3468b6..d49d2fb0782 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -89,14 +89,62 @@ function stubServerStreamingCall( describe('v1.BigQueryReadClient', () => { describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = bigqueryreadModule.v1.BigQueryReadClient.servicePath; - assert(servicePath); + it('has apiEndpoint', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + const apiEndpoint = client.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); }); - it('has apiEndpoint', () => { - const apiEndpoint = bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; - assert(apiEndpoint); + it('has universeDomain', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + const universeDomain = client.universeDomain; + assert.strictEqual(universeDomain, 'googleapis.com'); + }); + + if ( + typeof process !== 'undefined' && + typeof process.emitWarning === 'function' + ) { + it('throws DeprecationWarning if static servicePath is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const servicePath = + bigqueryreadModule.v1.BigQueryReadClient.servicePath; + assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + + it('throws DeprecationWarning if static apiEndpoint is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const apiEndpoint = + bigqueryreadModule.v1.BigQueryReadClient.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + } + it('sets apiEndpoint according to universe domain camelCase', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + universeDomain: 'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + it('sets apiEndpoint according to universe domain snakeCase', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + universe_domain: 
'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + it('does not allow setting both universeDomain and universe_domain', () => { + assert.throws(() => { + new bigqueryreadModule.v1.BigQueryReadClient({ + universe_domain: 'example.com', + universeDomain: 'example.net', + }); + }); }); it('has port', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index ecf41159838..21114fb0db1 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -89,16 +89,62 @@ function stubServerStreamingCall( describe('v1beta1.BigQueryStorageClient', () => { describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = - bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; - assert(servicePath); + it('has apiEndpoint', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + const apiEndpoint = client.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); }); - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; - assert(apiEndpoint); + it('has universeDomain', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + const universeDomain = client.universeDomain; + assert.strictEqual(universeDomain, 'googleapis.com'); + }); + + if ( + typeof process !== 'undefined' && + typeof process.emitWarning === 'function' + ) { + it('throws DeprecationWarning if static servicePath is used', () => { + const stub 
= sinon.stub(process, 'emitWarning'); + const servicePath = + bigquerystorageModule.v1beta1.BigQueryStorageClient.servicePath; + assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + + it('throws DeprecationWarning if static apiEndpoint is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const apiEndpoint = + bigquerystorageModule.v1beta1.BigQueryStorageClient.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + } + it('sets apiEndpoint according to universe domain camelCase', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + universeDomain: 'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + it('sets apiEndpoint according to universe domain snakeCase', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + universe_domain: 'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + it('does not allow setting both universeDomain and universe_domain', () => { + assert.throws(() => { + new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + universe_domain: 'example.com', + universeDomain: 'example.net', + }); + }); }); it('has port', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index e84e33a6fbd..4fa698a1ba3 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -82,16 +82,62 @@ function stubBidiStreamingCall( describe('v1.BigQueryWriteClient', () => { describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = - bigquerywriteModule.v1.BigQueryWriteClient.servicePath; - assert(servicePath); + it('has apiEndpoint', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + const apiEndpoint = client.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); }); - it('has apiEndpoint', () => { - const apiEndpoint = - bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; - assert(apiEndpoint); + it('has universeDomain', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + const universeDomain = client.universeDomain; + assert.strictEqual(universeDomain, 'googleapis.com'); + }); + + if ( + typeof process !== 'undefined' && + typeof process.emitWarning === 'function' + ) { + it('throws DeprecationWarning if static servicePath is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const servicePath = + bigquerywriteModule.v1.BigQueryWriteClient.servicePath; + assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + + it('throws DeprecationWarning if static apiEndpoint is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const apiEndpoint = + bigquerywriteModule.v1.BigQueryWriteClient.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + } + it('sets apiEndpoint according to universe domain camelCase', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + universeDomain: 'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + it('sets apiEndpoint according to universe domain snakeCase', () => { + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + 
universe_domain: 'example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + it('does not allow setting both universeDomain and universe_domain', () => { + assert.throws(() => { + new bigquerywriteModule.v1.BigQueryWriteClient({ + universe_domain: 'example.com', + universeDomain: 'example.net', + }); + }); }); it('has port', () => { From c9961b35321a8e71f120dd3c7a27936f1ca54b24 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 13:37:47 -0400 Subject: [PATCH 253/333] chore: update cloud-rad version to ^0.4.0 (#398) Source-Link: https://github.com/googleapis/synthtool/commit/1063ef32bfe41b112bade7a2dfad4e84d0058ebd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:e92044720ab3cb6984a70b0c6001081204375959ba3599ef6c42dd99a7783a67 Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 40b49d2bf81..638efabfb52 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:abc68a9bbf4fa808b25fa16d3b11141059dc757dbc34f024744bba36c200b40f -# created: 2023-10-04T20:56:40.710775365Z + digest: sha256:e92044720ab3cb6984a70b0c6001081204375959ba3599ef6c42dd99a7783a67 +# created: 2023-11-10T00:24:05.581078808Z diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh index 3596c1e4cb1..81a89f6c172 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh @@ -25,6 +25,6 @@ if [[ -z "$CREDENTIALS" ]]; then fi npm install -npm install --no-save @google-cloud/cloud-rad@^0.3.7 +npm install --no-save @google-cloud/cloud-rad@^0.4.0 # publish docs to devsite npx @google-cloud/cloud-rad . cloud-rad From 3a8d3272fb23aaba7ddbcb7a0160d73e61175224 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 20 Feb 2024 13:01:44 -0800 Subject: [PATCH 254/333] feat: update libs to support TPC Universes (#415) --- handwritten/bigquery-storage/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index e259e1da0ea..26b81dd42f7 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,7 +27,8 @@ "precompile": "gts clean" }, "dependencies": { - "google-gax": "^4.0.3" + "google-gax": "^4.3.1", + "google-auth-library": "^9.6.3" }, "peerDependencies": { "protobufjs": "^7.2.4" From 4c9d10b579d7f1f08491a83cd92965a0d082be92 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:09:32 -0400 Subject: [PATCH 255/333] chore(main): release 4.3.0 (#418) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.3.0 * 🦉 Updates from 
OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...pet_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 861ea70132b..fb326f55851 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.3.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.2.1...v4.3.0) (2024-02-20) + + +### Features + +* Update libs to support TPC Universes ([#415](https://github.com/googleapis/nodejs-bigquery-storage/issues/415)) ([8ee9e75](https://github.com/googleapis/nodejs-bigquery-storage/commit/8ee9e75c2f3de4dee66f710e117c3577c0bef993)) + ## [4.2.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.2.0...v4.2.1) (2024-02-06) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 26b81dd42f7..d52fd9f2776 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.2.1", + "version": "4.3.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json 
b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index c64e3155b51..38bfb651280 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.1", + "version": "4.3.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index de3c2525992..faf3c3e3e8a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.1", + "version": "4.3.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index c8144a014a7..46e72dfd901 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.1", + "version": "4.3.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index c8144a014a7..46e72dfd901 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.2.1", + "version": "4.3.0", "language": "TYPESCRIPT", "apis": [ { From 1290888ad47d6119e21948c1d92594cb6b0d4c74 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 2 Mar 2024 10:20:16 +0100 Subject: [PATCH 256/333] chore(deps): update dependency gapic-tools to ^0.4.0 (#425) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [gapic-tools](https://togithub.com/googleapis/gax-nodejs) ([source](https://togithub.com/googleapis/gax-nodejs/tree/HEAD/gapic-tools)) | [`^0.3.0` -> `^0.4.0`](https://renovatebot.com/diffs/npm/gapic-tools/0.3.0/0.4.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/gapic-tools/0.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/npm/gapic-tools/0.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/npm/gapic-tools/0.3.0/0.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/gapic-tools/0.3.0/0.4.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/gax-nodejs (gapic-tools) ### [`v0.4.0`](https://togithub.com/googleapis/gax-nodejs/releases/tag/gapic-tools-v0.4.0): gapic-tools: v0.4.0 [Compare Source](https://togithub.com/googleapis/gax-nodejs/compare/gapic-tools-v0.3.0...gapic-tools-v0.4.0) ##### Features - allow passing --keep-case and --force-number to compileProtos ([#​1561](https://togithub.com/googleapis/gax-nodejs/issues/1561)) ([004d112](https://togithub.com/googleapis/gax-nodejs/commit/004d112445f528a6cb143676e8b397b37137adf3))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://developer.mend.io/github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d52fd9f2776..a1ce8f84261 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -40,7 +40,7 @@ "@types/node": "^20.0.0", "@types/sinon": "^17.0.0", "c8": "^9.0.0", - "gapic-tools": "^0.3.0", + "gapic-tools": "^0.4.0", "gts": "^5.0.0", "jsdoc": "^4.0.0", "jsdoc-fresh": "^3.0.0", From 8ff7c66991c00f4bf8436794cab940a9cef68b3f Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 20 Mar 2024 13:35:44 -0700 Subject: [PATCH 257/333] feat: automatically convert date/datetime/timestamps (#422) --- handwritten/bigquery-storage/package.json | 6 +- .../src/managedwriter/encoder.ts | 133 ++++++++++++++++++ .../src/managedwriter/index.ts | 2 +- .../src/managedwriter/json_writer.ts | 33 ++--- .../system-test/managed_writer_client_test.ts | 71 ++++++++++ 5 files changed, 221 insertions(+), 24 deletions(-) create mode 100644 handwritten/bigquery-storage/src/managedwriter/encoder.ts diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a1ce8f84261..a3bd859b943 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -27,6 +27,7 @@ "precompile": "gts clean" }, "dependencies": { + "extend": "^3.0.2", "google-gax": "^4.3.1", "google-auth-library": "^9.6.3" }, @@ -35,10 +36,11 @@ }, "devDependencies": { "@google-cloud/bigquery": "^7.0.0", - "@types/uuid": "^9.0.1", + "@types/extend": "^3.0.4", "@types/mocha": "^9.0.0", "@types/node": "^20.0.0", "@types/sinon": "^17.0.0", + "@types/uuid": "^9.0.1", "c8": "^9.0.0", "gapic-tools": "^0.4.0", "gts": "^5.0.0", @@ -51,8 +53,8 @@ "pack-n-play": "^2.0.0", "sinon": "^17.0.0", "ts-loader": "^9.0.0", - "uuid": "^9.0.0", "typescript": "^5.1.6", + "uuid": "^9.0.0", "webpack": "^5.0.0", "webpack-cli": "^5.0.0" }, diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts new file mode 100644 index 00000000000..e54b8139dcb --- /dev/null +++ b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -0,0 +1,133 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as protobuf from 'protobufjs'; +import * as protos from '../../protos/protos'; +import {normalizeDescriptor} from '../adapt/proto'; +import * as extend from 'extend'; + +type IDescriptorProto = protos.google.protobuf.IDescriptorProto; +type DescriptorProto = protos.google.protobuf.DescriptorProto; + +const DescriptorProto = protos.google.protobuf.DescriptorProto; +const {Type} = protobuf; + +/** + * Internal class used by the JSONWriter to convert JSON data to protobuf messages. + * It can be configure to do some data conversion to match what BigQuery expects. + * + * @class + * @memberof managedwriter + */ +export class JSONEncoder { + private _type: protobuf.Type = Type.fromJSON('root', { + fields: {}, + }); + + /** + * Creates a new JSONEncoder instance. + * + * @param {Object} params - The parameters for the JSONEncoder. + * @param {IDescriptorProto} params.protoDescriptor - The proto descriptor + * for the JSON rows. + */ + constructor(params: {protoDescriptor: IDescriptorProto}) { + const {protoDescriptor} = params; + this.setProtoDescriptor(protoDescriptor); + } + + /** + * Update the proto descriptor for the Encoder. + * + * @param {IDescriptorProto} protoDescriptor - The proto descriptor. + */ + setProtoDescriptor(protoDescriptor: IDescriptorProto): void { + const normalized = normalizeDescriptor( + new DescriptorProto(protoDescriptor) + ); + this._type = Type.fromDescriptor(normalized); + } + + /** + * Writes a JSONList that contains objects to be written to the BigQuery table by first converting + * the JSON data to protobuf messages, then using Writer's appendRows() to write the data at current end + * of stream. If there is a schema update, the current Writer is closed and reopened with the updated schema. + * + * @param {JSONList} rows - The list of JSON rows. + * @returns {Uint8Array[]} The encoded rows. 
+ */ + encodeRows(rows: any[]): Uint8Array[] { + const serializedRows = rows + .map(r => { + return this.convertRow(r); + }) + .map(r => { + return this.encodeRow(r); + }); + return serializedRows; + } + + private isPlainObject(value: any): boolean { + return value && [undefined, Object].includes(value.constructor); + } + + private encodeRow(row: any): Uint8Array { + const msg = this._type.create(row); + return this._type.encode(msg).finish(); + } + + private convertRow(source: any): Object { + const row = extend(true, {}, source); + for (const key in row) { + const value = row[key]; + if (value === null) { + continue; + } + if (value instanceof Date) { + const pfield = this._type.fields[key]; + if (!pfield) { + continue; + } + switch (pfield.type) { + case 'int32': // DATE + // The value is the number of days since the Unix epoch (1970-01-01) + row[key] = value.getTime() / (1000 * 60 * 60 * 24); + break; + case 'int64': // TIMESTAMP + // The value is given in microseconds since the Unix epoch (1970-01-01) + row[key] = value.getTime() * 1000; + break; + case 'string': // DATETIME + row[key] = value.toJSON().replace(/^(.*)T(.*)Z$/, '$1 $2'); + break; + } + continue; + } + if (Array.isArray(value)) { + row[key] = value.map(v => { + if (!this.isPlainObject(v)) { + return v; + } + return this.convertRow(v); + }); + continue; + } + if (this.isPlainObject(value)) { + row[key] = this.convertRow(value); + continue; + } + } + return row; + } +} diff --git a/handwritten/bigquery-storage/src/managedwriter/index.ts b/handwritten/bigquery-storage/src/managedwriter/index.ts index ff6d052311f..0ce72a8eb51 100644 --- a/handwritten/bigquery-storage/src/managedwriter/index.ts +++ b/handwritten/bigquery-storage/src/managedwriter/index.ts @@ -18,7 +18,7 @@ * More information about this new write client may also be found in * the public documentation: https://cloud.google.com/bigquery/docs/write-api * - * It is EXPERIMENTAL and subject to change or removal without notice. 
This is primarily to signal that this + * It is EXPERIMENTAL and subject to change or removal without notice. This is primarily to signal that this * package may still make breaking changes to existing methods and functionality. * * @namespace managedwriter diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts index 0afe5755434..e4fdb28d680 100644 --- a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -12,25 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. -import * as protobuf from 'protobufjs'; import * as protos from '../../protos/protos'; import {PendingWrite} from './pending_write'; import {StreamConnection, RemoveListener} from './stream_connection'; import * as adapt from '../adapt'; import {Writer} from './writer'; +import {JSONEncoder} from './encoder'; type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; type IInt64Value = protos.google.protobuf.IInt64Value; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; -type DescriptorProto = protos.google.protobuf.DescriptorProto; -type JSONPrimitive = string | number | boolean | null; -type JSONValue = JSONPrimitive | JSONObject | JSONArray; -type JSONObject = {[member: string]: JSONValue}; -type JSONArray = Array; -type JSONList = Array; - -const DescriptorProto = protos.google.protobuf.DescriptorProto; -const {Type} = protobuf; +export type JSONPrimitive = string | number | boolean | Date | null; +export type JSONValue = JSONPrimitive | JSONObject | JSONArray; +export type JSONObject = {[member: string]: JSONValue}; +export type JSONArray = Array; +export type JSONList = Array; /** * A StreamWriter that can write JSON data to BigQuery tables. 
The JSONWriter is @@ -47,9 +43,7 @@ const {Type} = protobuf; */ export class JSONWriter { private _writer: Writer; - private _type: protobuf.Type = Type.fromJSON('root', { - fields: {}, - }); + private _encoder: JSONEncoder; private _schemaListener: RemoveListener; /** @@ -67,6 +61,9 @@ export class JSONWriter { }) { const {connection, protoDescriptor} = params; this._writer = new Writer(params); + this._encoder = new JSONEncoder({ + protoDescriptor: params.protoDescriptor, + }); this._schemaListener = connection.onSchemaUpdated(this.onSchemaUpdated); this.setProtoDescriptor(protoDescriptor); } @@ -87,11 +84,8 @@ export class JSONWriter { * @param {IDescriptorProto} protoDescriptor - The proto descriptor. */ setProtoDescriptor(protoDescriptor: IDescriptorProto): void { - const normalized = adapt.normalizeDescriptor( - new DescriptorProto(protoDescriptor) - ); - this._type = Type.fromDescriptor(normalized); this._writer.setProtoDescriptor(protoDescriptor); + this._encoder.setProtoDescriptor(protoDescriptor); } /** @@ -104,10 +98,7 @@ export class JSONWriter { * @returns {managedwriter.PendingWrite} The pending write. 
*/ appendRows(rows: JSONList, offsetValue?: IInt64Value['value']): PendingWrite { - const serializedRows = rows.map(r => { - const msg = this._type.create(r); - return this._type.encode(msg).finish(); - }); + const serializedRows = this._encoder.encodeRows(rows); const pw = this._writer.appendRows( { serializedRows, diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 7b7dfa0b775..714af273b91 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -22,6 +22,7 @@ import * as bigquerywriter from '../src'; import * as protobuf from 'protobufjs'; import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; +import {JSONEncoder} from '../src/managedwriter/encoder'; const {managedwriter, adapt} = bigquerywriter; const {WriterClient, Writer, JSONWriter, parseStorageErrors} = managedwriter; @@ -364,6 +365,76 @@ describe('managedwriter.WriterClient', () => { }); }); + describe('JSONEncoder', () => { + it('should automatically convert date/datetime/timestamps to expect BigQuery format', () => { + const updatedSchema = { + fields: [ + ...(schema.fields || []), + { + name: 'customer_birthday', + type: 'DATE', + }, + { + name: 'customer_created_at', + type: 'DATETIME', + }, + { + name: 'customer_updated_at', + type: 'TIMESTAMP', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(updatedSchema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + const encoder = new JSONEncoder({ + protoDescriptor, + }); + + // Row 1 + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + customer_birthday: new Date('1815-12-10'), + customer_created_at: new Date('2022-01-09T03:49:46.564Z'), + customer_updated_at: 
new Date('2023-01-09T03:49:46.564Z'), + }; + + // Row 2 + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + customer_birthday: new Date('1912-07-23'), + customer_created_at: new Date('2022-01-09T03:49:46.564Z'), + customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + }; + + const Proto = Type.fromDescriptor(protoDescriptor); + const encoded = encoder.encodeRows([row1, row2]); + + const encodedRow1 = encoded[0]; + const decodedRow1 = Proto.decode(encodedRow1).toJSON(); + assert.deepEqual(decodedRow1, { + customer_name: 'Ada Lovelace', + row_num: 1, + customer_birthday: -56270, + customer_created_at: '2022-01-09 03:49:46.564', + customer_updated_at: 1673236186564000, + }); + + const encodedRow2 = encoded[1]; + const decodedRow2 = Proto.decode(encodedRow2).toJSON(); + assert.deepEqual(decodedRow2, { + customer_name: 'Alan Turing', + row_num: 2, + customer_birthday: -20981, + customer_created_at: '2022-01-09 03:49:46.564', + customer_updated_at: 1673236186564000, + }); + }); + }); + describe('JSONWriter', () => { it('should invoke appendRows without errors', async () => { bqWriteClient.initialize(); From ba35d46c2397453dd6129712199db5e5f4715ff8 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 20 Mar 2024 13:54:07 -0700 Subject: [PATCH 258/333] fix: handle more scenarios for stream reconnection (#429) --- .../src/managedwriter/stream_connection.ts | 27 ++---- .../src/managedwriter/writer_client.ts | 6 +- .../system-test/managed_writer_client_test.ts | 89 +++++++++++++++++++ 3 files changed, 100 insertions(+), 22 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 995fa7d60ff..f0c91d7ac34 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -133,25 +133,14 @@ export class StreamConnection extends EventEmitter { }; private 
shouldReconnect(err: gax.GoogleError): boolean { - if ( - err.code && - [gax.Status.UNAVAILABLE, gax.Status.RESOURCE_EXHAUSTED].includes( - err.code - ) && - err.message - ) { - const detail = err.message.toLowerCase(); - const knownErrors = [ - 'service is currently unavailable', // schema mismatch - 'read econnreset', // idle connection reset - 'bandwidth exhausted', - 'memory limit exceeded', - ]; - const isKnownError = - knownErrors.findIndex(err => detail.includes(err)) !== -1; - return isKnownError; - } - return false; + const reconnectionErrorCodes = [ + gax.Status.UNAVAILABLE, + gax.Status.RESOURCE_EXHAUSTED, + gax.Status.ABORTED, + gax.Status.CANCELLED, + gax.Status.DEADLINE_EXCEEDED, + ]; + return !!err.code && reconnectionErrorCodes.includes(err.code); } private isPermanentError(err: gax.GoogleError): boolean { diff --git a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts index 8d9d1747333..55721ab66ca 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts @@ -22,7 +22,6 @@ import {StreamConnection} from './stream_connection'; type StreamConnections = { connectionList: StreamConnection[]; - connections: Record; }; type CreateWriteStreamRequest = protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest; @@ -68,7 +67,6 @@ export class WriterClient { }); this._connections = { connectionList: [], - connections: {}, }; this._open = false; } @@ -189,7 +187,6 @@ export class WriterClient { options ); this._connections.connectionList.push(streamConnection); - this._connections.connections[`${streamId}`] = streamConnection; return streamConnection; } catch (err) { throw new Error('managed stream connection failed:' + err); @@ -230,6 +227,9 @@ export class WriterClient { this._connections.connectionList.map(conn => { conn.close(); }); + this._connections = { + connectionList: [], + }; 
this._open = false; } diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 714af273b91..633d394ccd0 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -16,6 +16,7 @@ import * as assert from 'assert'; import {describe, it, xit} from 'mocha'; import * as uuid from 'uuid'; import * as gax from 'google-gax'; +import * as sinon from 'sinon'; import {BigQuery, TableSchema} from '@google-cloud/bigquery'; import * as protos from '../protos/protos'; import * as bigquerywriter from '../src'; @@ -24,6 +25,9 @@ import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; +const sandbox = sinon.createSandbox(); +afterEach(() => sandbox.restore()); + const {managedwriter, adapt} = bigquerywriter; const {WriterClient, Writer, JSONWriter, parseStorageErrors} = managedwriter; const {Type} = protobuf; @@ -851,6 +855,91 @@ describe('managedwriter.WriterClient', () => { } }); + it('should trigger reconnection given some specific errors', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + let reconnectedCalled = false; + sandbox.stub(connection, 'reconnect').callsFake(() => { + reconnectedCalled = true; + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + try { + // Write some data and trigger error + const pw = writer.appendRows( + [ + { + customer_name: 'Ada Lovelace', + row_num: 1, + }, + { + customer_name: 'Alan Turing', + row_num: 2, + }, + ], + 0 + ); + await pw.getResult(); + + const reconnectErrorCases: 
gax.GoogleError[] = [ + { + code: gax.Status.ABORTED, + msg: 'Closing the stream because it has been inactive', + }, + { + code: gax.Status.RESOURCE_EXHAUSTED, + msg: 'read econnreset', + }, + { + code: gax.Status.ABORTED, + msg: 'service is currently unavailable', + }, + { + code: gax.Status.RESOURCE_EXHAUSTED, + msg: 'bandwidth exhausted', + }, + { + code: gax.Status.RESOURCE_EXHAUSTED, + msg: 'memory limit exceeded', + }, + { + code: gax.Status.CANCELLED, + msg: 'any', + }, + { + code: gax.Status.DEADLINE_EXCEEDED, + msg: 'a msg', + }, + ].map(err => { + const gerr = new gax.GoogleError(err.msg); + gerr.code = err.code; + return gerr; + }); + for (const gerr of reconnectErrorCases) { + const conn = connection['_connection'] as gax.CancellableStream; // private method + conn.emit('error', gerr); + assert.equal(reconnectedCalled, true); + + reconnectedCalled = false; // reset flag + } + + writer.close(); + } finally { + client.close(); + } + }); + xit('reconnect on idle connection', async () => { bqWriteClient.initialize(); const client = new WriterClient(); From 22cd6b3ec1c23f76329d2e1a596d2567ac5cae38 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 21 Mar 2024 10:42:16 -0700 Subject: [PATCH 259/333] feat: add support for missing value interpretation (#428) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #426 🦕 --- .../src/managedwriter/json_writer.ts | 45 ++++-- .../src/managedwriter/writer.ts | 100 +++++++++++-- .../system-test/managed_writer_client_test.ts | 136 ++++++++++++++++++ 3 files changed, 260 insertions(+), 21 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts index e4fdb28d680..f187c26c898 100644 --- a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -14,12 +14,17 @@ import * as protos from 
'../../protos/protos'; import {PendingWrite} from './pending_write'; -import {StreamConnection, RemoveListener} from './stream_connection'; +import {RemoveListener} from './stream_connection'; import * as adapt from '../adapt'; -import {Writer} from './writer'; +import {Writer, WriterOptions} from './writer'; import {JSONEncoder} from './encoder'; type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; +type MissingValueInterpretation = + protos.google.cloud.bigquery.storage.v1.AppendRowsRequest['defaultMissingValueInterpretation']; +type MissingValueInterpretationMap = { + [column: string]: MissingValueInterpretation; +}; type IInt64Value = protos.google.protobuf.IInt64Value; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; export type JSONPrimitive = string | number | boolean | Date | null; @@ -49,16 +54,10 @@ export class JSONWriter { /** * Creates a new JSONWriter instance. * - * @param {Object} params - The parameters for the JSONWriter. - * @param {StreamConnection} params.connection - The stream connection - * to the BigQuery streaming insert operation. - * @param {IDescriptorProto} params.protoDescriptor - The proto descriptor - * for the JSON rows. + * @param {WriterOptions} params - The parameters for the JSONWriter. + * See WriterOptions docs for more information. */ - constructor(params: { - connection: StreamConnection; - protoDescriptor: IDescriptorProto; - }) { + constructor(params: WriterOptions) { const {connection, protoDescriptor} = params; this._writer = new Writer(params); this._encoder = new JSONEncoder({ @@ -88,6 +87,30 @@ export class JSONWriter { this._encoder.setProtoDescriptor(protoDescriptor); } + /** + * Update how missing values are interpreted for the given stream. 
+ * + * @param {MissingValueInterpretation} defaultMissingValueInterpretation + */ + setDefaultMissingValueInterpretation( + defaultMissingValueInterpretation: MissingValueInterpretation + ) { + this._writer.setDefaultMissingValueInterpretation( + defaultMissingValueInterpretation + ); + } + + /** + * Update how missing values are interpreted for individual columns. + * + * @param {MissingValueInterpretationMap} missingValueInterpretations + */ + setMissingValueInterpretations( + missingValueInterpretations: MissingValueInterpretationMap + ) { + this._writer.setMissingValueInterpretations(missingValueInterpretations); + } + /** * Writes a JSONList that contains objects to be written to the BigQuery table by first converting * the JSON data to protobuf messages, then using Writer's appendRows() to write the data at current end diff --git a/handwritten/bigquery-storage/src/managedwriter/writer.ts b/handwritten/bigquery-storage/src/managedwriter/writer.ts index 1a090da6247..04939e0792a 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer.ts @@ -24,9 +24,60 @@ type ProtoData = protos.google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; type DescriptorProto = protos.google.protobuf.DescriptorProto; +type MissingValueInterpretation = + AppendRowRequest['defaultMissingValueInterpretation']; +type MissingValueInterpretationMap = { + [column: string]: MissingValueInterpretation; +}; const DescriptorProto = protos.google.protobuf.DescriptorProto; +export interface WriterOptions { + /** The stream connection to the BigQuery streaming insert operation. */ + connection: StreamConnection; + + /** The proto descriptor for the stream. */ + protoDescriptor: IDescriptorProto; + + /** + * Controls how missing values are interpreted for a given stream. 
+ * `missingValueInterpretations` set for individual columns can override the default chosen + * with this option. + * + * For example, if you want to write + * `NULL` instead of using default values for some columns, you can set + * `defaultMissingValueInterpretation` to `DEFAULT_VALUE` and at the same + * time, set `missingValueInterpretations` to `NULL_VALUE` on those columns. + */ + defaultMissingValueInterpretation?: MissingValueInterpretation; + + /** + * Control how missing values are interpreted for individual columns. + * + * You must provide an object to indicate how to interpret missing value for some fields. Missing + * values are fields present in user schema but missing in rows. The key is + * the field name. The value is the interpretation of missing values for the + * field. + * + * For example, the following option would indicate that missing values in the "foo" + * column are interpreted as null, whereas missing values in the "bar" column are + * treated as the default value: + * + * { + * "foo": 'DEFAULT_VALUE', + * "bar": 'NULL_VALUE', + * } + * + * If a field is not in this object and has missing values, the missing values + * in this field are interpreted as NULL unless overridden with a default missing + * value interpretation. + * + * Currently, field name can only be top-level column name, can't be a struct + * field path like 'foo.bar'. + */ + missingValueInterpretations?: MissingValueInterpretationMap; +} + /** * A BigQuery Storage API Writer that can be used to write data into BigQuery Table * using the Storage API. @@ -37,23 +88,26 @@ const DescriptorProto = protos.google.protobuf.DescriptorProto; export class Writer { private _protoDescriptor: DescriptorProto; private _streamConnection: StreamConnection; + private _defaultMissingValueInterpretation?: MissingValueInterpretation; + private _missingValueInterpretations?: MissingValueInterpretationMap; /** * Creates a new Writer instance. 
* - * @param {Object} params - The parameters for the JSONWriter. - * @param {StreamConnection} params.connection - The stream connection - * to the BigQuery streaming insert operation. - * @param {IDescriptorProto} params.protoDescriptor - The proto descriptor - * for the JSON rows. + * @param {WriterOptions} params - The parameters for the Writer. + * See WriterOptions docs for more information. */ - constructor(params: { - connection: StreamConnection; - protoDescriptor: IDescriptorProto; - }) { - const {connection, protoDescriptor} = params; + constructor(params: WriterOptions) { + const { + connection, + protoDescriptor, + missingValueInterpretations, + defaultMissingValueInterpretation, + } = params; this._streamConnection = connection; this._protoDescriptor = new DescriptorProto(protoDescriptor); + this._defaultMissingValueInterpretation = defaultMissingValueInterpretation; + this._missingValueInterpretations = missingValueInterpretations; } /** @@ -72,6 +126,28 @@ export class Writer { } } + /** + * Update how missing values are interpreted for the given stream. + * + * @param {MissingValueInterpretation} defaultMissingValueInterpretation + */ + setDefaultMissingValueInterpretation( + defaultMissingValueInterpretation: MissingValueInterpretation + ) { + this._defaultMissingValueInterpretation = defaultMissingValueInterpretation; + } + + /** + * Update how missing values are interpreted for individual columns. + * + * @param {MissingValueInterpretationMap} missingValueInterpretations + */ + setMissingValueInterpretations( + missingValueInterpretations: MissingValueInterpretationMap + ) { + this._missingValueInterpretations = missingValueInterpretations; + } + /** * Schedules the writing of rows at given offset. 
* @@ -97,6 +173,10 @@ export class Writer { protoDescriptor: this._protoDescriptor.toJSON(), }, }, + defaultMissingValueInterpretation: + this._defaultMissingValueInterpretation, + missingValueInterpretations: this + ._missingValueInterpretations as AppendRowRequest['missingValueInterpretations'], offset, }; diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 633d394ccd0..45438a78732 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -629,6 +629,142 @@ describe('managedwriter.WriterClient', () => { }).timeout(30 * 1000); }); + it('should fill default values when MissingValuesInterpretation is set', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const schema: TableSchema = { + fields: [ + { + name: 'customer_name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'row_num', + type: 'INTEGER', + mode: 'REQUIRED', + }, + { + name: 'id', + type: 'STRING', + defaultValueExpression: 'GENERATE_UUID()', + }, + { + name: 'created_at', + type: 'TIMESTAMP', + defaultValueExpression: 'CURRENT_TIMESTAMP()', + }, + { + name: 'updated_at', + type: 'TIMESTAMP', + defaultValueExpression: 'CURRENT_TIMESTAMP()', + }, + ], + }; + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId + '_default_values', {schema}); + const parent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; + + try { + const connection = await 
client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const streamId = connection.getStreamId(); + const writer = new JSONWriter({ + connection, + protoDescriptor, + defaultMissingValueInterpretation: 'DEFAULT_VALUE', + missingValueInterpretations: { + updated_at: 'NULL_VALUE', + }, + }); + + let pw = writer.appendRows([row1, row2], 0); + let result = await pw.getResult(); + + // change MVI config + writer.setDefaultMissingValueInterpretation('NULL_VALUE'); + writer.setMissingValueInterpretations({ + updated_at: 'DEFAULT_VALUE', + }); + + const row3 = { + customer_name: 'Charles Babbage', + row_num: 3, + }; + + const row4 = { + customer_name: 'Lord Byron', + row_num: 4, + }; + + pw = writer.appendRows([row3, row4], 2); + result = await pw.getResult(); + + assert.equal(result.error, null); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 4); + + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + + const [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by row_num` + ); + assert.strictEqual(rows.length, 4); + + const first = rows[0]; + assert.notEqual(first.id, null); + assert.notEqual(first.created_at, null); + assert.equal(first.updated_at, null); + + const second = rows[1]; + assert.notEqual(second.id, null); + assert.notEqual(second.created_at, null); + assert.equal(second.updated_at, null); + + // After change on MVI config + const third = rows[2]; + assert.equal(third.id, null); + assert.equal(third.created_at, null); + assert.notEqual(third.updated_at, null); + + const forth = rows[3]; + assert.equal(forth.id, null); + assert.equal(forth.created_at, null); + assert.notEqual(forth.updated_at, null); + + writer.close(); + } finally { + client.close(); + } + }); + describe('Error Scenarios', () => { 
it('send request with mismatched proto descriptor', async () => { bqWriteClient.initialize(); From 9dbbbd1e4ba2ffc3cd436b3267c16a1c9d52b5b1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 14:07:54 -0400 Subject: [PATCH 260/333] docs: mark BigQueryWrite v1beta2 as deprecated (#416) --- .../cloud/bigquery/storage/v1/table.proto | 18 + .../bigquery-storage/protos/protos.d.ts | 106 +++++- handwritten/bigquery-storage/protos/protos.js | 332 ++++++++++++++++++ .../bigquery-storage/protos/protos.json | 21 +- .../src/adapt/proto_mappings.ts | 2 + 5 files changed, 477 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 47629c510e6..7f9dde5f6b7 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -83,6 +83,9 @@ message TableFieldSchema { // JSON, String JSON = 15; + + // RANGE + RANGE = 16; } enum Mode { @@ -96,6 +99,12 @@ message TableFieldSchema { REPEATED = 3; } + // Represents the type of a field element. + message FieldElementType { + // Required. The type of a field element. + Type type = 1 [(google.api.field_behavior) = REQUIRED]; + } + // Required. The field name. The name must contain only letters (a-z, A-Z), // numbers (0-9), or underscores (_), and must start with a letter or // underscore. The maximum length is 128 characters. @@ -168,4 +177,13 @@ message TableFieldSchema { // Optional. A SQL expression to specify the [default value] // (https://cloud.google.com/bigquery/docs/default-values) for this field. string default_value_expression = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The subtype of the RANGE, if the type of this field is RANGE. 
If + // the type is RANGE, this field is required. Possible values for the field + // element type of a RANGE include: + // * DATE + // * DATETIME + // * TIMESTAMP + FieldElementType range_element_type = 11 + [(google.api.field_behavior) = OPTIONAL]; } diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 0bd59bff934..83eeaa9f6bc 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4262,6 +4262,9 @@ export namespace google { /** TableFieldSchema defaultValueExpression */ defaultValueExpression?: (string|null); + + /** TableFieldSchema rangeElementType */ + rangeElementType?: (google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null); } /** Represents a TableFieldSchema. */ @@ -4300,6 +4303,9 @@ export namespace google { /** TableFieldSchema defaultValueExpression. */ public defaultValueExpression: string; + /** TableFieldSchema rangeElementType. */ + public rangeElementType?: (google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null); + /** * Creates a new TableFieldSchema instance using the specified properties. * @param [properties] Properties to set @@ -4397,7 +4403,8 @@ export namespace google { NUMERIC = 12, BIGNUMERIC = 13, INTERVAL = 14, - JSON = 15 + JSON = 15, + RANGE = 16 } /** Mode enum. */ @@ -4407,6 +4414,103 @@ export namespace google { REQUIRED = 2, REPEATED = 3 } + + /** Properties of a FieldElementType. */ + interface IFieldElementType { + + /** FieldElementType type */ + type?: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null); + } + + /** Represents a FieldElementType. */ + class FieldElementType implements IFieldElementType { + + /** + * Constructs a new FieldElementType. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType); + + /** FieldElementType type. */ + public type: (google.cloud.bigquery.storage.v1.TableFieldSchema.Type|keyof typeof google.cloud.bigquery.storage.v1.TableFieldSchema.Type); + + /** + * Creates a new FieldElementType instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldElementType instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType): google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType; + + /** + * Encodes the specified FieldElementType message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify|verify} messages. + * @param message FieldElementType message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldElementType message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify|verify} messages. + * @param message FieldElementType message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldElementType message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldElementType + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType; + + /** + * Decodes a FieldElementType message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldElementType + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType; + + /** + * Verifies a FieldElementType message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldElementType message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldElementType + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType; + + /** + * Creates a plain object from a FieldElementType message. Also converts values to other types if specified. + * @param message FieldElementType + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldElementType to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldElementType + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } } diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 6cf0ac8b67a..ff8146684af 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -10286,6 +10286,7 @@ * @property {number|Long|null} [precision] TableFieldSchema precision * @property {number|Long|null} [scale] TableFieldSchema scale * @property {string|null} [defaultValueExpression] TableFieldSchema defaultValueExpression + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null} [rangeElementType] TableFieldSchema rangeElementType */ /** @@ -10376,6 +10377,14 @@ */ TableFieldSchema.prototype.defaultValueExpression = ""; + /** + * TableFieldSchema rangeElementType. + * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null|undefined} rangeElementType + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.rangeElementType = null; + /** * Creates a new TableFieldSchema instance using the specified properties. 
* @function create @@ -10419,6 +10428,8 @@ writer.uint32(/* id 9, wireType 0 =*/72).int64(message.scale); if (message.defaultValueExpression != null && Object.hasOwnProperty.call(message, "defaultValueExpression")) writer.uint32(/* id 10, wireType 2 =*/82).string(message.defaultValueExpression); + if (message.rangeElementType != null && Object.hasOwnProperty.call(message, "rangeElementType")) + $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.encode(message.rangeElementType, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); return writer; }; @@ -10491,6 +10502,10 @@ message.defaultValueExpression = reader.string(); break; } + case 11: { + message.rangeElementType = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -10549,6 +10564,7 @@ case 13: case 14: case 15: + case 16: break; } if (message.mode != null && message.hasOwnProperty("mode")) @@ -10585,6 +10601,11 @@ if (message.defaultValueExpression != null && message.hasOwnProperty("defaultValueExpression")) if (!$util.isString(message.defaultValueExpression)) return "defaultValueExpression: string expected"; + if (message.rangeElementType != null && message.hasOwnProperty("rangeElementType")) { + var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify(message.rangeElementType); + if (error) + return "rangeElementType." 
+ error; + } return null; }; @@ -10673,6 +10694,10 @@ case 15: message.type = 15; break; + case "RANGE": + case 16: + message.type = 16; + break; } switch (object.mode) { default: @@ -10739,6 +10764,11 @@ message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); if (object.defaultValueExpression != null) message.defaultValueExpression = String(object.defaultValueExpression); + if (object.rangeElementType != null) { + if (typeof object.rangeElementType !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.rangeElementType: object expected"); + message.rangeElementType = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.fromObject(object.rangeElementType); + } return message; }; @@ -10778,6 +10808,7 @@ } else object.scale = options.longs === String ? "0" : 0; object.defaultValueExpression = ""; + object.rangeElementType = null; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -10809,6 +10840,8 @@ object.scale = options.longs === String ? $util.Long.prototype.toString.call(message.scale) : options.longs === Number ? 
new $util.LongBits(message.scale.low >>> 0, message.scale.high >>> 0).toNumber() : message.scale; if (message.defaultValueExpression != null && message.hasOwnProperty("defaultValueExpression")) object.defaultValueExpression = message.defaultValueExpression; + if (message.rangeElementType != null && message.hasOwnProperty("rangeElementType")) + object.rangeElementType = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.toObject(message.rangeElementType, options); return object; }; @@ -10858,6 +10891,7 @@ * @property {number} BIGNUMERIC=13 BIGNUMERIC value * @property {number} INTERVAL=14 INTERVAL value * @property {number} JSON=15 JSON value + * @property {number} RANGE=16 RANGE value */ TableFieldSchema.Type = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -10877,6 +10911,7 @@ values[valuesById[13] = "BIGNUMERIC"] = 13; values[valuesById[14] = "INTERVAL"] = 14; values[valuesById[15] = "JSON"] = 15; + values[valuesById[16] = "RANGE"] = 16; return values; })(); @@ -10898,6 +10933,303 @@ return values; })(); + TableFieldSchema.FieldElementType = (function() { + + /** + * Properties of a FieldElementType. + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @interface IFieldElementType + * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.Type|null} [type] FieldElementType type + */ + + /** + * Constructs a new FieldElementType. + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @classdesc Represents a FieldElementType. + * @implements IFieldElementType + * @constructor + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType=} [properties] Properties to set + */ + function FieldElementType(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldElementType type. 
+ * @member {google.cloud.bigquery.storage.v1.TableFieldSchema.Type} type + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @instance + */ + FieldElementType.prototype.type = 0; + + /** + * Creates a new FieldElementType instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType} FieldElementType instance + */ + FieldElementType.create = function create(properties) { + return new FieldElementType(properties); + }; + + /** + * Encodes the specified FieldElementType message. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType} message FieldElementType message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldElementType.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.type); + return writer; + }; + + /** + * Encodes the specified FieldElementType message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType} message FieldElementType message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldElementType.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldElementType message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType} FieldElementType + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldElementType.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.type = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldElementType message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType} FieldElementType + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldElementType.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldElementType message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldElementType.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.type != null && message.hasOwnProperty("type")) + switch (message.type) { + default: + return "type: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 10: + case 11: + case 12: + case 13: + case 14: + case 15: + case 16: + break; + } + return null; + }; + + /** + * Creates a FieldElementType message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType} FieldElementType + */ + FieldElementType.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType(); + switch (object.type) { + default: + if (typeof object.type === "number") { + message.type = object.type; + break; + } + break; + case "TYPE_UNSPECIFIED": + case 0: + message.type = 0; + break; + case "STRING": + case 1: + message.type = 1; + break; + case "INT64": + case 2: + message.type = 2; + break; + case "DOUBLE": + case 3: + message.type = 3; + break; + case "STRUCT": + case 4: + message.type = 4; + break; + case "BYTES": + case 5: + message.type = 5; + break; + case "BOOL": + case 6: + message.type = 6; + break; + case "TIMESTAMP": + case 7: + message.type = 7; + break; + case "DATE": + case 8: + message.type = 8; + break; + case "TIME": + case 9: + message.type = 9; + break; + case "DATETIME": + case 10: + message.type = 10; + break; + case "GEOGRAPHY": + case 11: + message.type = 11; + break; + case "NUMERIC": + case 12: + message.type = 12; + break; + case "BIGNUMERIC": + case 13: + message.type = 13; + break; + case "INTERVAL": + case 14: + message.type = 14; + break; + case "JSON": + case 15: + message.type = 15; + break; + case "RANGE": + case 16: + message.type = 16; + break; + } + return message; + }; + + /** + * Creates a plain object from a FieldElementType message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType} message FieldElementType + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldElementType.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.type = options.enums === String ? "TYPE_UNSPECIFIED" : 0; + if (message.type != null && message.hasOwnProperty("type")) + object.type = options.enums === String ? $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] === undefined ? message.type : $root.google.cloud.bigquery.storage.v1.TableFieldSchema.Type[message.type] : message.type; + return object; + }; + + /** + * Converts this FieldElementType to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @instance + * @returns {Object.} JSON object + */ + FieldElementType.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldElementType + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldElementType.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType"; + }; + + return FieldElementType; + })(); + return TableFieldSchema; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 462a8b51cd9..bcb47635e2d 100644 
--- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1120,6 +1120,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "rangeElementType": { + "type": "FieldElementType", + "id": 11, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } }, "nested": { @@ -1140,7 +1147,8 @@ "NUMERIC": 12, "BIGNUMERIC": 13, "INTERVAL": 14, - "JSON": 15 + "JSON": 15, + "RANGE": 16 } }, "Mode": { @@ -1150,6 +1158,17 @@ "REQUIRED": 2, "REPEATED": 3 } + }, + "FieldElementType": { + "fields": { + "type": { + "type": "Type", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } } } } diff --git a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts index b0f7d90e15a..30b87d8524c 100644 --- a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts @@ -67,6 +67,8 @@ export const bqTypeToFieldTypeMap: Record< TYPE_UNSPECIFIED: null, [TableFieldSchema.Type.INTERVAL]: null, INTERVAL: null, + [TableFieldSchema.Type.RANGE]: null, + RANGE: null, }; export const bqModeToFieldLabelMapProto2: Record< From 9837dfd69376bec4d3eb8c7c314f4f937bbbf237 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 14:45:57 -0400 Subject: [PATCH 261/333] chore(main): release 4.4.0 (#430) --- handwritten/bigquery-storage/CHANGELOG.md | 13 +++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...t_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...t_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...adata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...adata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 18 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 
fb326f55851..6a6df1766d2 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [4.4.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.3.0...v4.4.0) (2024-03-21) + + +### Features + +* Add support for missing value interpretation ([#428](https://github.com/googleapis/nodejs-bigquery-storage/issues/428)) ([1a3e4ba](https://github.com/googleapis/nodejs-bigquery-storage/commit/1a3e4bac76ef65e353e8bf6fb6780dbf8d0c887e)) +* Automatically convert date/datetime/timestamps ([#422](https://github.com/googleapis/nodejs-bigquery-storage/issues/422)) ([cbc7e94](https://github.com/googleapis/nodejs-bigquery-storage/commit/cbc7e94dfd6f79c038ed4b74c814a84c90a42be0)) + + +### Bug Fixes + +* Handle more scenarios for stream reconnection ([#429](https://github.com/googleapis/nodejs-bigquery-storage/issues/429)) ([e6f9323](https://github.com/googleapis/nodejs-bigquery-storage/commit/e6f93234906bb9f3346c80943660d04b36206d23)) + ## [4.3.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.2.1...v4.3.0) (2024-02-20) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index a3bd859b943..adc63448eab 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.3.0", + "version": "4.4.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 38bfb651280..3a3da09d76e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.3.0", + "version": "4.4.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index faf3c3e3e8a..15838892c89 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.3.0", + "version": "4.4.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 46e72dfd901..ee4e6491b33 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.3.0", + "version": "4.4.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 46e72dfd901..ee4e6491b33 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.3.0", + "version": "4.4.0", "language": "TYPESCRIPT", "apis": [ { From 4ca1df2358899afabca9982ba8471166dd46c201 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 09:59:29 -0400 Subject: [PATCH 262/333] feat: add several fields to manage state of database encryption update (#433) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add several fields to manage state of database encryption update PiperOrigin-RevId: 619289281 Source-Link: https://github.com/googleapis/googleapis/commit/3a7c33486ca758b180c6d11dd4705fa9a22e8576 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6a8c733062d833d11c5245eda50f5108e0e55324 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmE4YzczMzA2MmQ4MzNkMTFjNTI0NWVkYTUwZjUxMDhlMGU1NTMyNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../src/managedwriter/stream_connection.ts | 5 +-- .../src/v1/big_query_read_client.ts | 15 +++++-- .../src/v1/big_query_write_client.ts | 15 +++++-- .../src/v1beta1/big_query_storage_client.ts | 15 +++++-- .../test/gapic_big_query_read_v1.ts | 37 +++++++++++++++++- .../test/gapic_big_query_storage_v1beta1.ts | 39 ++++++++++++++++++- .../test/gapic_big_query_write_v1.ts | 37 +++++++++++++++++- 7 files changed, 145 insertions(+), 18 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index f0c91d7ac34..1a1e3b89332 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ 
b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -176,9 +176,8 @@ export class StreamConnection extends EventEmitter { } // This header is required so that the BigQuery Storage API // knows which region to route the request to. - callOptions.otherArgs.headers[ - 'x-goog-request-params' - ] = `write_stream=${streamId}`; + callOptions.otherArgs.headers['x-goog-request-params'] = + `write_stream=${streamId}`; return callOptions; } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 467a9523bb4..396581d0cd0 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -119,8 +119,15 @@ export class BigQueryReadClient { 'Please set either universe_domain or universeDomain, but not both.' ); } + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; this._universeDomain = - opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + opts?.universeDomain ?? + opts?.universe_domain ?? + universeDomainEnvVar ?? + 'googleapis.com'; this._servicePath = 'bigquerystorage.' + this._universeDomain; const servicePath = opts?.servicePath || opts?.apiEndpoint || this._servicePath; @@ -169,7 +176,7 @@ export class BigQueryReadClient { // Determine the client header string. 
const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { + if (typeof process === 'object' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { clientHeader.push(`gl-web/${this._gaxModule.version}`); @@ -318,7 +325,7 @@ export class BigQueryReadClient { */ static get servicePath() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( @@ -336,7 +343,7 @@ export class BigQueryReadClient { */ static get apiEndpoint() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 815630029fa..8bf17046fb6 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -122,8 +122,15 @@ export class BigQueryWriteClient { 'Please set either universe_domain or universeDomain, but not both.' ); } + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; this._universeDomain = - opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + opts?.universeDomain ?? + opts?.universe_domain ?? + universeDomainEnvVar ?? + 'googleapis.com'; this._servicePath = 'bigquerystorage.' + this._universeDomain; const servicePath = opts?.servicePath || opts?.apiEndpoint || this._servicePath; @@ -172,7 +179,7 @@ export class BigQueryWriteClient { // Determine the client header string. 
const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { + if (typeof process === 'object' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { clientHeader.push(`gl-web/${this._gaxModule.version}`); @@ -324,7 +331,7 @@ export class BigQueryWriteClient { */ static get servicePath() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( @@ -342,7 +349,7 @@ export class BigQueryWriteClient { */ static get apiEndpoint() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 8f175f5384a..a8b32943153 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -124,8 +124,15 @@ export class BigQueryStorageClient { 'Please set either universe_domain or universeDomain, but not both.' ); } + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; this._universeDomain = - opts?.universeDomain ?? opts?.universe_domain ?? 'googleapis.com'; + opts?.universeDomain ?? + opts?.universe_domain ?? + universeDomainEnvVar ?? + 'googleapis.com'; this._servicePath = 'bigquerystorage.' + this._universeDomain; const servicePath = opts?.servicePath || opts?.apiEndpoint || this._servicePath; @@ -174,7 +181,7 @@ export class BigQueryStorageClient { // Determine the client header string. 
const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; - if (typeof process !== 'undefined' && 'versions' in process) { + if (typeof process === 'object' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { clientHeader.push(`gl-web/${this._gaxModule.version}`); @@ -320,7 +327,7 @@ export class BigQueryStorageClient { */ static get servicePath() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( @@ -338,7 +345,7 @@ export class BigQueryStorageClient { */ static get apiEndpoint() { if ( - typeof process !== undefined && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { process.emitWarning( diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index d49d2fb0782..e6a50c7ee64 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -102,7 +102,7 @@ describe('v1.BigQueryReadClient', () => { }); if ( - typeof process !== 'undefined' && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { it('throws DeprecationWarning if static servicePath is used', () => { @@ -138,6 +138,41 @@ describe('v1.BigQueryReadClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual(servicePath, 'bigquerystorage.example.com'); }); + + if (typeof process === 'object' && 'env' in process) { + describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => { + it('sets apiEndpoint from environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = new bigqueryreadModule.v1.BigQueryReadClient(); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); 
+ if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + + it('value configured in code has priority over environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + universeDomain: 'configured.example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual( + servicePath, + 'bigquerystorage.configured.example.com' + ); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + }); + } it('does not allow setting both universeDomain and universe_domain', () => { assert.throws(() => { new bigqueryreadModule.v1.BigQueryReadClient({ diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 21114fb0db1..0b74bd11430 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -102,7 +102,7 @@ describe('v1beta1.BigQueryStorageClient', () => { }); if ( - typeof process !== 'undefined' && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { it('throws DeprecationWarning if static servicePath is used', () => { @@ -138,6 +138,43 @@ describe('v1beta1.BigQueryStorageClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual(servicePath, 'bigquerystorage.example.com'); }); + + if (typeof process === 'object' && 'env' in process) { + describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => { + it('sets apiEndpoint from environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const 
client = + new bigquerystorageModule.v1beta1.BigQueryStorageClient(); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + + it('value configured in code has priority over environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = + new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + universeDomain: 'configured.example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual( + servicePath, + 'bigquerystorage.configured.example.com' + ); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + }); + } it('does not allow setting both universeDomain and universe_domain', () => { assert.throws(() => { new bigquerystorageModule.v1beta1.BigQueryStorageClient({ diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index 4fa698a1ba3..9d4f4765225 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -95,7 +95,7 @@ describe('v1.BigQueryWriteClient', () => { }); if ( - typeof process !== 'undefined' && + typeof process === 'object' && typeof process.emitWarning === 'function' ) { it('throws DeprecationWarning if static servicePath is used', () => { @@ -131,6 +131,41 @@ describe('v1.BigQueryWriteClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual(servicePath, 'bigquerystorage.example.com'); }); + + if (typeof process === 'object' && 'env' in process) { + describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => { + it('sets apiEndpoint from 
environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient(); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + + it('value configured in code has priority over environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = new bigquerywriteModule.v1.BigQueryWriteClient({ + universeDomain: 'configured.example.com', + }); + const servicePath = client.apiEndpoint; + assert.strictEqual( + servicePath, + 'bigquerystorage.configured.example.com' + ); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + }); + } it('does not allow setting both universeDomain and universe_domain', () => { assert.throws(() => { new bigquerywriteModule.v1.BigQueryWriteClient({ From 2767108a01fba806852ac904c6c286f5136c1c97 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 8 Apr 2024 09:46:27 -0700 Subject: [PATCH 263/333] feat: auto convert numbers (#436) --- .../bigquery-storage/protos/protos.d.ts | 9 +++ handwritten/bigquery-storage/protos/protos.js | 53 ++++++++++++++-- .../bigquery-storage/protos/protos.json | 26 +++++++- .../src/managedwriter/encoder.ts | 17 ++++-- .../system-test/managed_writer_client_test.ts | 60 +++++++++++++++++++ 5 files changed, 155 insertions(+), 10 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 83eeaa9f6bc..6a9bd20d350 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ 
b/handwritten/bigquery-storage/protos/protos.d.ts @@ -9702,6 +9702,9 @@ export namespace google { /** ServiceOptions .google.api.oauthScopes */ ".google.api.oauthScopes"?: (string|null); + + /** ServiceOptions .google.api.apiVersion */ + ".google.api.apiVersion"?: (string|null); } /** Represents a ServiceOptions. */ @@ -12939,6 +12942,9 @@ export namespace google { /** Publishing protoReferenceDocumentationUri */ protoReferenceDocumentationUri?: (string|null); + + /** Publishing restReferenceDocumentationUri */ + restReferenceDocumentationUri?: (string|null); } /** Represents a Publishing. */ @@ -12980,6 +12986,9 @@ export namespace google { /** Publishing protoReferenceDocumentationUri. */ public protoReferenceDocumentationUri: string; + /** Publishing restReferenceDocumentationUri. */ + public restReferenceDocumentationUri: string; + /** * Creates a new Publishing instance using the specified properties. * @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index ff8146684af..f27ee65ad6a 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -23295,12 +23295,9 @@ if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) { - writer.uint32(/* id 1052, wireType 2 =*/8418).fork(); + if (message[".google.api.fieldBehavior"] != null && message[".google.api.fieldBehavior"].length) for (var i = 0; i < message[".google.api.fieldBehavior"].length; ++i) - writer.int32(message[".google.api.fieldBehavior"][i]); - writer.ldelim(); - } + writer.uint32(/* id 1052, wireType 0 
=*/8416).int32(message[".google.api.fieldBehavior"][i]); if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 1055, wireType 2 =*/8442).fork()).ldelim(); if (message[".google.cloud.bigquery.storage.v1.columnName"] != null && Object.hasOwnProperty.call(message, ".google.cloud.bigquery.storage.v1.columnName")) @@ -25160,6 +25157,7 @@ * @property {Array.|null} [uninterpretedOption] ServiceOptions uninterpretedOption * @property {string|null} [".google.api.defaultHost"] ServiceOptions .google.api.defaultHost * @property {string|null} [".google.api.oauthScopes"] ServiceOptions .google.api.oauthScopes + * @property {string|null} [".google.api.apiVersion"] ServiceOptions .google.api.apiVersion */ /** @@ -25218,6 +25216,14 @@ */ ServiceOptions.prototype[".google.api.oauthScopes"] = ""; + /** + * ServiceOptions .google.api.apiVersion. + * @member {string} .google.api.apiVersion + * @memberof google.protobuf.ServiceOptions + * @instance + */ + ServiceOptions.prototype[".google.api.apiVersion"] = ""; + /** * Creates a new ServiceOptions instance using the specified properties. 
* @function create @@ -25253,6 +25259,8 @@ writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); + if (message[".google.api.apiVersion"] != null && Object.hasOwnProperty.call(message, ".google.api.apiVersion")) + writer.uint32(/* id 525000001, wireType 2 =*/4200000010).string(message[".google.api.apiVersion"]); return writer; }; @@ -25309,6 +25317,10 @@ message[".google.api.oauthScopes"] = reader.string(); break; } + case 525000001: { + message[".google.api.apiVersion"] = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -25367,6 +25379,9 @@ if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) if (!$util.isString(message[".google.api.oauthScopes"])) return ".google.api.oauthScopes: string expected"; + if (message[".google.api.apiVersion"] != null && message.hasOwnProperty(".google.api.apiVersion")) + if (!$util.isString(message[".google.api.apiVersion"])) + return ".google.api.apiVersion: string expected"; return null; }; @@ -25403,6 +25418,8 @@ message[".google.api.defaultHost"] = String(object[".google.api.defaultHost"]); if (object[".google.api.oauthScopes"] != null) message[".google.api.oauthScopes"] = String(object[".google.api.oauthScopes"]); + if (object[".google.api.apiVersion"] != null) + message[".google.api.apiVersion"] = String(object[".google.api.apiVersion"]); return message; }; @@ -25426,6 +25443,7 @@ object.features = null; object[".google.api.defaultHost"] = ""; object[".google.api.oauthScopes"] = ""; + object[".google.api.apiVersion"] = ""; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; @@ -25440,6 +25458,8 @@ object[".google.api.defaultHost"] = 
message[".google.api.defaultHost"]; if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) object[".google.api.oauthScopes"] = message[".google.api.oauthScopes"]; + if (message[".google.api.apiVersion"] != null && message.hasOwnProperty(".google.api.apiVersion")) + object[".google.api.apiVersion"] = message[".google.api.apiVersion"]; return object; }; @@ -33487,6 +33507,7 @@ * @property {google.api.ClientLibraryOrganization|null} [organization] Publishing organization * @property {Array.|null} [librarySettings] Publishing librarySettings * @property {string|null} [protoReferenceDocumentationUri] Publishing protoReferenceDocumentationUri + * @property {string|null} [restReferenceDocumentationUri] Publishing restReferenceDocumentationUri */ /** @@ -33587,6 +33608,14 @@ */ Publishing.prototype.protoReferenceDocumentationUri = ""; + /** + * Publishing restReferenceDocumentationUri. + * @member {string} restReferenceDocumentationUri + * @memberof google.api.Publishing + * @instance + */ + Publishing.prototype.restReferenceDocumentationUri = ""; + /** * Creates a new Publishing instance using the specified properties. 
* @function create @@ -33634,6 +33663,8 @@ $root.google.api.ClientLibrarySettings.encode(message.librarySettings[i], writer.uint32(/* id 109, wireType 2 =*/874).fork()).ldelim(); if (message.protoReferenceDocumentationUri != null && Object.hasOwnProperty.call(message, "protoReferenceDocumentationUri")) writer.uint32(/* id 110, wireType 2 =*/882).string(message.protoReferenceDocumentationUri); + if (message.restReferenceDocumentationUri != null && Object.hasOwnProperty.call(message, "restReferenceDocumentationUri")) + writer.uint32(/* id 111, wireType 2 =*/890).string(message.restReferenceDocumentationUri); return writer; }; @@ -33714,6 +33745,10 @@ message.protoReferenceDocumentationUri = reader.string(); break; } + case 111: { + message.restReferenceDocumentationUri = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -33806,6 +33841,9 @@ if (message.protoReferenceDocumentationUri != null && message.hasOwnProperty("protoReferenceDocumentationUri")) if (!$util.isString(message.protoReferenceDocumentationUri)) return "protoReferenceDocumentationUri: string expected"; + if (message.restReferenceDocumentationUri != null && message.hasOwnProperty("restReferenceDocumentationUri")) + if (!$util.isString(message.restReferenceDocumentationUri)) + return "restReferenceDocumentationUri: string expected"; return null; }; @@ -33900,6 +33938,8 @@ } if (object.protoReferenceDocumentationUri != null) message.protoReferenceDocumentationUri = String(object.protoReferenceDocumentationUri); + if (object.restReferenceDocumentationUri != null) + message.restReferenceDocumentationUri = String(object.restReferenceDocumentationUri); return message; }; @@ -33929,6 +33969,7 @@ object.docTagPrefix = ""; object.organization = options.enums === String ? 
"CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED" : 0; object.protoReferenceDocumentationUri = ""; + object.restReferenceDocumentationUri = ""; } if (message.methodSettings && message.methodSettings.length) { object.methodSettings = []; @@ -33959,6 +34000,8 @@ } if (message.protoReferenceDocumentationUri != null && message.hasOwnProperty("protoReferenceDocumentationUri")) object.protoReferenceDocumentationUri = message.protoReferenceDocumentationUri; + if (message.restReferenceDocumentationUri != null && message.hasOwnProperty("restReferenceDocumentationUri")) + object.restReferenceDocumentationUri = message.restReferenceDocumentationUri; return object; }; diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index bcb47635e2d..63e97dd1808 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -2775,9 +2775,21 @@ 1001, 1001 ], + [ + 1002, + 1002 + ], + [ + 9990, + 9990 + ], [ 9995, 9999 + ], + [ + 10000, + 10000 ] ], "reserved": [ @@ -3172,6 +3184,11 @@ "id": 1050, "extend": "google.protobuf.ServiceOptions" }, + "apiVersion": { + "type": "string", + "id": 525000001, + "extend": "google.protobuf.ServiceOptions" + }, "CommonLanguageSettings": { "fields": { "referenceDocsUri": { @@ -3280,6 +3297,10 @@ "protoReferenceDocumentationUri": { "type": "string", "id": 110 + }, + "restReferenceDocumentationUri": { + "type": "string", + "id": 111 } } }, @@ -3455,7 +3476,10 @@ "rule": "repeated", "type": "google.api.FieldBehavior", "id": 1052, - "extend": "google.protobuf.FieldOptions" + "extend": "google.protobuf.FieldOptions", + "options": { + "packed": false + } }, "FieldBehavior": { "values": { diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts index e54b8139dcb..c822b55ee36 100644 --- a/handwritten/bigquery-storage/src/managedwriter/encoder.ts +++ 
b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -94,11 +94,11 @@ export class JSONEncoder { if (value === null) { continue; } + const pfield = this._type.fields[key]; + if (!pfield) { + continue; + } if (value instanceof Date) { - const pfield = this._type.fields[key]; - if (!pfield) { - continue; - } switch (pfield.type) { case 'int32': // DATE // The value is the number of days since the Unix epoch (1970-01-01) @@ -114,6 +114,15 @@ export class JSONEncoder { } continue; } + // NUMERIC and BIGNUMERIC integer + if (typeof value === 'number' || typeof value === 'bigint') { + switch (pfield.type) { + case 'string': + row[key] = value.toString(10); + break; + } + continue; + } if (Array.isArray(value)) { row[key] = value.map(v => { if (!this.isPlainObject(v)) { diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 45438a78732..09ce7caecf3 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -437,6 +437,66 @@ describe('managedwriter.WriterClient', () => { customer_updated_at: 1673236186564000, }); }); + + it('should automatically convert numeric/bignumeric to expect BigQuery format', () => { + const updatedSchema = { + fields: [ + ...(schema.fields || []), + { + name: 'customer_points', + type: 'NUMERIC', + }, + { + name: 'customer_funds', + type: 'BIGNUMERIC', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(updatedSchema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + const encoder = new JSONEncoder({ + protoDescriptor, + }); + + // accept plain integers and bigint + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + customer_points: 1234, + customer_funds: BigInt(123456789), + }; + + // accept floats + const row2 = 
{ + customer_name: 'Alan Turing', + row_num: 2, + customer_points: 1234.56, + customer_funds: '123456789.001234', // still accept in string + }; + + const Proto = Type.fromDescriptor(protoDescriptor); + const encoded = encoder.encodeRows([row1, row2]); + + const encodedRow1 = encoded[0]; + const decodedRow1 = Proto.decode(encodedRow1).toJSON(); + assert.deepEqual(decodedRow1, { + customer_name: 'Ada Lovelace', + row_num: 1, + customer_points: '1234', + customer_funds: '123456789', + }); + + const encodedRow2 = encoded[1]; + const decodedRow2 = Proto.decode(encodedRow2).toJSON(); + assert.deepEqual(decodedRow2, { + customer_name: 'Alan Turing', + row_num: 2, + customer_points: '1234.56', + customer_funds: '123456789.001234', + }); + }); }); describe('JSONWriter', () => { From a833672d8418533849501cd12d84d8d85e75a886 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 8 Apr 2024 10:57:20 -0700 Subject: [PATCH 264/333] fix: auto convert nested fields (#438) --- .../src/managedwriter/encoder.ts | 113 +++++++++++------- .../src/managedwriter/json_writer.ts | 2 +- .../system-test/managed_writer_client_test.ts | 60 +++++++--- 3 files changed, 117 insertions(+), 58 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts index c822b55ee36..ece1f98de11 100644 --- a/handwritten/bigquery-storage/src/managedwriter/encoder.ts +++ b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -16,6 +16,7 @@ import * as protobuf from 'protobufjs'; import * as protos from '../../protos/protos'; import {normalizeDescriptor} from '../adapt/proto'; import * as extend from 'extend'; +import {JSONObject, JSONValue} from './json_writer'; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; type DescriptorProto = protos.google.protobuf.DescriptorProto; @@ -67,10 +68,10 @@ export class JSONEncoder { * @param {JSONList} rows - The list of JSON rows. 
* @returns {Uint8Array[]} The encoded rows. */ - encodeRows(rows: any[]): Uint8Array[] { + encodeRows(rows: JSONObject[]): Uint8Array[] { const serializedRows = rows .map(r => { - return this.convertRow(r); + return this.convertRow(r, this._type); }) .map(r => { return this.encodeRow(r); @@ -82,61 +83,87 @@ export class JSONEncoder { return value && [undefined, Object].includes(value.constructor); } - private encodeRow(row: any): Uint8Array { + private encodeRow(row: JSONObject): Uint8Array { const msg = this._type.create(row); return this._type.encode(msg).finish(); } - private convertRow(source: any): Object { + private convertRow(source: JSONObject, ptype: protobuf.Type): JSONObject { const row = extend(true, {}, source); for (const key in row) { const value = row[key]; if (value === null) { continue; } - const pfield = this._type.fields[key]; - if (!pfield) { + const encodedValue = this.encodeRowValue(value, key, ptype); + if (encodedValue === undefined) { continue; } - if (value instanceof Date) { - switch (pfield.type) { - case 'int32': // DATE - // The value is the number of days since the Unix epoch (1970-01-01) - row[key] = value.getTime() / (1000 * 60 * 60 * 24); - break; - case 'int64': // TIMESTAMP - // The value is given in microseconds since the Unix epoch (1970-01-01) - row[key] = value.getTime() * 1000; - break; - case 'string': // DATETIME - row[key] = value.toJSON().replace(/^(.*)T(.*)Z$/, '$1 $2'); - break; - } - continue; - } - // NUMERIC and BIGNUMERIC integer - if (typeof value === 'number' || typeof value === 'bigint') { - switch (pfield.type) { - case 'string': - row[key] = value.toString(10); - break; - } - continue; - } - if (Array.isArray(value)) { - row[key] = value.map(v => { - if (!this.isPlainObject(v)) { - return v; - } - return this.convertRow(v); - }); - continue; + row[key] = encodedValue; + } + return row; + } + + private encodeRowValue( + value: JSONValue, + key: string, + ptype: protobuf.Type + ): JSONValue | undefined { + 
const pfield = ptype.fields[key]; + if (!pfield) { + return undefined; + } + if (value instanceof Date) { + switch (pfield.type) { + case 'int32': // DATE + // The value is the number of days since the Unix epoch (1970-01-01) + return value.getTime() / (1000 * 60 * 60 * 24); + case 'int64': // TIMESTAMP + // The value is given in microseconds since the Unix epoch (1970-01-01) + return value.getTime() * 1000; + case 'string': // DATETIME + return value.toJSON().replace(/^(.*)T(.*)Z$/, '$1 $2'); } - if (this.isPlainObject(value)) { - row[key] = this.convertRow(value); - continue; + return undefined; + } + // NUMERIC and BIGNUMERIC integer + if (typeof value === 'number' || typeof value === 'bigint') { + switch (pfield.type) { + case 'string': + return value.toString(10); } + return undefined; + } + if (Array.isArray(value)) { + const subType = this.getSubType(key, ptype); + return value.map(v => { + if (this.isPlainObject(v)) { + return this.convertRow(v as JSONObject, subType); + } + const encodedValue = this.encodeRowValue(v, key, subType); + if (encodedValue === undefined) { + return v; + } + return encodedValue; + }); + } + if (this.isPlainObject(value)) { + const subType = this.getSubType(key, ptype); + return this.convertRow(value as JSONObject, subType); + } + return undefined; + } + + private getSubType(key: string, ptype: protobuf.Type): protobuf.Type { + const pfield = ptype.fields[key]; + if (!pfield) { + return ptype; + } + try { + const subType = ptype.lookupTypeOrEnum(pfield.type); + return subType; + } catch (err) { + return ptype; } - return row; } } diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts index f187c26c898..60c9c9d9b03 100644 --- a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -27,7 +27,7 @@ type MissingValueInterpretationMap = { }; type IInt64Value = 
protos.google.protobuf.IInt64Value; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; -export type JSONPrimitive = string | number | boolean | Date | null; +export type JSONPrimitive = string | number | bigint | boolean | Date | null; export type JSONValue = JSONPrimitive | JSONObject | JSONArray; export type JSONObject = {[member: string]: JSONValue}; export type JSONArray = Array; diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 09ce7caecf3..2b36deb5756 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -371,20 +371,34 @@ describe('managedwriter.WriterClient', () => { describe('JSONEncoder', () => { it('should automatically convert date/datetime/timestamps to expect BigQuery format', () => { - const updatedSchema = { + const updatedSchema: TableSchema = { fields: [ ...(schema.fields || []), { name: 'customer_birthday', type: 'DATE', + mode: 'REQUIRED', }, { - name: 'customer_created_at', - type: 'DATETIME', + name: 'customer_metadata', + type: 'RECORD', + mode: 'REQUIRED', + fields: [ + { + name: 'customer_created_at', + type: 'DATETIME', + mode: 'REQUIRED', + }, + { + name: 'customer_updated_at', + type: 'TIMESTAMP', + }, + ], }, { - name: 'customer_updated_at', + name: 'customer_last_purchase_dates', type: 'TIMESTAMP', + mode: 'REPEATED', }, ], }; @@ -401,8 +415,14 @@ describe('managedwriter.WriterClient', () => { customer_name: 'Ada Lovelace', row_num: 1, customer_birthday: new Date('1815-12-10'), - customer_created_at: new Date('2022-01-09T03:49:46.564Z'), - customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + customer_metadata: { + customer_created_at: new Date('2022-01-09T03:49:46.564Z'), + customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + }, + customer_last_purchase_dates: [ + new 
Date('2022-01-09T03:49:46.564Z'), + new Date('2023-01-09T03:49:46.564Z'), + ], }; // Row 2 @@ -410,8 +430,14 @@ describe('managedwriter.WriterClient', () => { customer_name: 'Alan Turing', row_num: 2, customer_birthday: new Date('1912-07-23'), - customer_created_at: new Date('2022-01-09T03:49:46.564Z'), - customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + customer_metadata: { + customer_created_at: new Date('2022-01-09T03:49:46.564Z'), + customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + }, + customer_last_purchase_dates: [ + new Date('2022-01-09T03:49:46.564Z'), + new Date('2023-01-09T03:49:46.564Z'), + ], }; const Proto = Type.fromDescriptor(protoDescriptor); @@ -421,20 +447,26 @@ describe('managedwriter.WriterClient', () => { const decodedRow1 = Proto.decode(encodedRow1).toJSON(); assert.deepEqual(decodedRow1, { customer_name: 'Ada Lovelace', - row_num: 1, + row_num: '1', customer_birthday: -56270, - customer_created_at: '2022-01-09 03:49:46.564', - customer_updated_at: 1673236186564000, + customer_metadata: { + customer_created_at: '2022-01-09 03:49:46.564', + customer_updated_at: '1673236186564000', + }, + customer_last_purchase_dates: ['1641700186564000', '1673236186564000'], }); const encodedRow2 = encoded[1]; const decodedRow2 = Proto.decode(encodedRow2).toJSON(); assert.deepEqual(decodedRow2, { customer_name: 'Alan Turing', - row_num: 2, + row_num: '2', customer_birthday: -20981, - customer_created_at: '2022-01-09 03:49:46.564', - customer_updated_at: 1673236186564000, + customer_metadata: { + customer_created_at: '2022-01-09 03:49:46.564', + customer_updated_at: '1673236186564000', + }, + customer_last_purchase_dates: ['1641700186564000', '1673236186564000'], }); }); From 98109412086530dd91db91b70ccef735d79adb9d Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 10 Apr 2024 12:58:43 -0700 Subject: [PATCH 265/333] fix: trigger reconnection on INTERNAL status error (#435) --- 
.../bigquery-storage/src/managedwriter/stream_connection.ts | 1 + .../system-test/managed_writer_client_test.ts | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 1a1e3b89332..492bf2582bc 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -139,6 +139,7 @@ export class StreamConnection extends EventEmitter { gax.Status.ABORTED, gax.Status.CANCELLED, gax.Status.DEADLINE_EXCEEDED, + gax.Status.INTERNAL, ]; return !!err.code && reconnectionErrorCodes.includes(err.code); } diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 2b36deb5756..77b03b69c7d 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1149,6 +1149,10 @@ describe('managedwriter.WriterClient', () => { code: gax.Status.DEADLINE_EXCEEDED, msg: 'a msg', }, + { + code: gax.Status.INTERNAL, + msg: 'received RST_STREAM with code', + }, ].map(err => { const gerr = new gax.GoogleError(err.msg); gerr.code = err.code; From 4cd1af370589071ec2dc5077abe907e1e00d3a5b Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 15 Apr 2024 10:45:00 -0700 Subject: [PATCH 266/333] fix: destroy internal grpc conn when closed (#439) --- .../bigquery-storage/src/managedwriter/stream_connection.ts | 1 + .../bigquery-storage/system-test/managed_writer_client_test.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 492bf2582bc..8dcd421a258 100644 --- 
a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -327,6 +327,7 @@ export class StreamConnection extends EventEmitter { } this._connection.end(); this._connection.removeAllListeners(); + this._connection.destroy(); this._connection = null; } diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 77b03b69c7d..d205d547b21 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1269,6 +1269,7 @@ describe('managedwriter.WriterClient', () => { destinationTable: parent, }); const connection = await client.createStreamConnection({streamId}); + const internalConn = connection['_connection']!; const writer = new Writer({ connection, protoDescriptor, @@ -1284,6 +1285,7 @@ describe('managedwriter.WriterClient', () => { writer.close(); client.close(); assert.strictEqual(client.isOpen(), false); + assert.strictEqual(internalConn.destroyed, true); } finally { client.close(); } From 7a561a7cbd22f1ef2ab788b583646f061542e22d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 14:53:30 -0400 Subject: [PATCH 267/333] chore(main): release 4.5.0 (#434) --- handwritten/bigquery-storage/CHANGELOG.md | 15 +++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...ata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...ata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 20 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 6a6df1766d2..1dd410ef4ee 100644 --- 
a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [4.5.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.4.0...v4.5.0) (2024-04-15) + + +### Features + +* Add several fields to manage state of database encryption update ([#433](https://github.com/googleapis/nodejs-bigquery-storage/issues/433)) ([cf51780](https://github.com/googleapis/nodejs-bigquery-storage/commit/cf51780f991bc9befb87a812946f977cc8db9350)) +* Auto convert numbers ([#436](https://github.com/googleapis/nodejs-bigquery-storage/issues/436)) ([f0505e7](https://github.com/googleapis/nodejs-bigquery-storage/commit/f0505e7e32baca2eb9133161aa7161bf58e38a28)) + + +### Bug Fixes + +* Auto convert nested fields ([#438](https://github.com/googleapis/nodejs-bigquery-storage/issues/438)) ([0ba5b7d](https://github.com/googleapis/nodejs-bigquery-storage/commit/0ba5b7d592299f47ae285ce15b11157ee9e0207e)) +* Destroy internal grpc conn when closed ([#439](https://github.com/googleapis/nodejs-bigquery-storage/issues/439)) ([e7731bf](https://github.com/googleapis/nodejs-bigquery-storage/commit/e7731bf8f2b35c143a02bcd560fb4afb97936f0c)) +* Trigger reconnection on INTERNAL status error ([#435](https://github.com/googleapis/nodejs-bigquery-storage/issues/435)) ([f555322](https://github.com/googleapis/nodejs-bigquery-storage/commit/f5553220b9b9fe5289f9a1c394d3f964c016f07d)) + ## [4.4.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.3.0...v4.4.0) (2024-03-21) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index adc63448eab..7bf2f1abdca 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.4.0", + "version": "4.5.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": 
"Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 3a3da09d76e..72b191d5959 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.4.0", + "version": "4.5.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 15838892c89..cfb06ecd460 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.4.0", + "version": "4.5.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index ee4e6491b33..35ee1befda1 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.4.0", + "version": "4.5.0", "language": "TYPESCRIPT", "apis": [ { diff --git 
a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index ee4e6491b33..35ee1befda1 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.4.0", + "version": "4.5.0", "language": "TYPESCRIPT", "apis": [ { From 366f5e2d1fe4424cd6f4e9daac6acf0aea917cf5 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Fri, 3 May 2024 07:21:24 -0700 Subject: [PATCH 268/333] feat: enable write retry and nack pending writes on reconnect (#443) --- .../src/managedwriter/pending_write.ts | 23 + .../src/managedwriter/stream_connection.ts | 157 ++++--- .../src/managedwriter/writer.ts | 2 +- .../src/managedwriter/writer_client.ts | 37 ++ .../system-test/managed_writer_client_test.ts | 435 +++++++++++++++--- 5 files changed, 536 insertions(+), 118 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/pending_write.ts b/handwritten/bigquery-storage/src/managedwriter/pending_write.ts index 2e5a748316c..699e133dd9f 100644 --- a/handwritten/bigquery-storage/src/managedwriter/pending_write.ts +++ b/handwritten/bigquery-storage/src/managedwriter/pending_write.ts @@ -28,18 +28,41 @@ type AppendRowRequest = export class PendingWrite { private request: AppendRowRequest; private response?: AppendRowsResponse; + private attempts: number; private promise: Promise; private resolveFunc?: (response: AppendRowsResponse) => void; private rejectFunc?: (reason?: protos.google.rpc.IStatus) => void; constructor(request: AppendRowRequest) { this.request = request; + this.attempts = 0; this.promise = new Promise((resolve, reject) => { 
this.resolveFunc = resolve; this.rejectFunc = reject; }); } + /** + * Increase number of attempts and return current value. + * + * @private + * @internal + * @returns {number} current number of attempts + */ + _increaseAttempts(): number { + return this.attempts++; + } + + /** + * Resolve pending write with error or AppendRowResponse. + * This resolves the promise accessed via GetResult() + * + * @see GetResult + * + * @private + * @internal + * @returns {number} current number of attempts + */ _markDone(err: Error | null, response?: AppendRowsResponse) { if (err) { this.rejectFunc && this.rejectFunc(err); diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 8dcd421a258..56e05e10865 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -19,7 +19,6 @@ import * as protos from '../../protos/protos'; import {WriterClient} from './writer_client'; import {PendingWrite} from './pending_write'; import {logger} from './logger'; -import {parseStorageErrors} from './error'; type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; type IInt64Value = protos.google.protobuf.IInt64Value; @@ -56,6 +55,7 @@ export class StreamConnection extends EventEmitter { private _streamId: string; private _writeClient: WriterClient; private _connection?: gax.CancellableStream | null; + private _lastConnectionError?: gax.GoogleError | null; private _callOptions?: gax.CallOptions; private _pendingWrites: PendingWrite[]; @@ -76,6 +76,7 @@ export class StreamConnection extends EventEmitter { if (this.isOpen()) { this.close(); } + this._lastConnectionError = null; const callOptions = this.resolveCallOptions( this._streamId, this._callOptions @@ -86,7 +87,23 @@ export class StreamConnection extends EventEmitter { this._connection.on('data', this.handleData); 
this._connection.on('error', this.handleError); this._connection.on('close', () => { - this.trace('connection closed'); + this.trace('connection closed', this._lastConnectionError); + if (this.hasPendingWrites()) { + const retrySettings = this._writeClient._retrySettings; + if ( + retrySettings.enableWriteRetries && + this.isRetryableError(this._lastConnectionError) + ) { + this.reconnect(); + this.resendAllPendingWrites(); + } else { + const err = new gax.GoogleError( + 'Connection failure, please retry the request' + ); + err.code = gax.Status.UNAVAILABLE; + this.ackAllPendingWrites(err); + } + } }); this._connection.on('pause', () => { this.trace('connection paused'); @@ -106,62 +123,53 @@ export class StreamConnection extends EventEmitter { private handleError = (err: gax.GoogleError) => { this.trace('on error', err, JSON.stringify(err)); - if (this.shouldReconnect(err)) { - this.reconnect(); - return; - } - let nextPendingWrite = this.getNextPendingWrite(); - if (this.isPermanentError(err)) { - this.trace('found permanent error', err); - while (nextPendingWrite) { - this.ackNextPendingWrite(err); - nextPendingWrite = this.getNextPendingWrite(); - } - this.emit('error', err); - return; - } - if (this.isRequestError(err) && nextPendingWrite) { + this._lastConnectionError = err; + const nextPendingWrite = this.getNextPendingWrite(); + if (nextPendingWrite) { this.trace( 'found request error with pending write', err, nextPendingWrite ); - this.ackNextPendingWrite(err); + this.handleRetry(err); + } + if (this.listenerCount('error') === 0 && this.isRetryableError(err)) { return; } this.emit('error', err); }; - private shouldReconnect(err: gax.GoogleError): boolean { - const reconnectionErrorCodes = [ - gax.Status.UNAVAILABLE, - gax.Status.RESOURCE_EXHAUSTED, + private handleRetry(err: gax.GoogleError) { + const retrySettings = this._writeClient._retrySettings; + if (retrySettings.enableWriteRetries && this.isRetryableError(err)) { + if (!this.isConnectionClosed()) { 
+ const pw = this._pendingWrites.pop()!; + this.send(pw); + } + } else { + this.ackNextPendingWrite(err); + } + } + + private isRetryableError(err?: gax.GoogleError | null): boolean { + if (!err) { + return false; + } + const errorCodes = [ gax.Status.ABORTED, + gax.Status.UNAVAILABLE, gax.Status.CANCELLED, - gax.Status.DEADLINE_EXCEEDED, gax.Status.INTERNAL, + gax.Status.DEADLINE_EXCEEDED, ]; - return !!err.code && reconnectionErrorCodes.includes(err.code); + return !!err.code && errorCodes.includes(err.code); } - private isPermanentError(err: gax.GoogleError): boolean { - if (err.code === gax.Status.INVALID_ARGUMENT) { - const storageErrors = parseStorageErrors(err); - for (const storageError of storageErrors) { - if ( - storageError.errorMessage?.includes( - 'Schema mismatch due to extra fields in user schema' - ) - ) { - return true; - } - } + private isConnectionClosed() { + if (this._connection) { + return this._connection.destroyed || this._connection.closed; } - return false; - } - - private isRequestError(err: gax.GoogleError): boolean { - return err.code === gax.Status.INVALID_ARGUMENT; + return true; } private resolveCallOptions( @@ -183,15 +191,23 @@ export class StreamConnection extends EventEmitter { } private handleData = (response: AppendRowsResponse) => { - this.trace('data arrived', response); - const pw = this.getNextPendingWrite(); - if (!pw) { + this.trace('data arrived', response, this._pendingWrites.length); + if (!this.hasPendingWrites()) { this.trace('data arrived with no pending write available', response); return; } if (response.updatedSchema) { this.emit('schemaUpdated', response.updatedSchema); } + const responseErr = response.error; + if (responseErr) { + const gerr = new gax.GoogleError(responseErr.message!); + gerr.code = responseErr.code!; + if (this.isRetryableError(gerr)) { + this.handleRetry(gerr); + return; + } + } this.ackNextPendingWrite(null, response); }; @@ -238,13 +254,38 @@ export class StreamConnection extends 
EventEmitter { return this._streamId; }; + private hasPendingWrites(): boolean { + return this._pendingWrites.length > 0; + } + private getNextPendingWrite(): PendingWrite | null { if (this._pendingWrites.length > 0) { - return this._pendingWrites[0]; + return this._pendingWrites[this._pendingWrites.length - 1]; } return null; } + private resendAllPendingWrites() { + const pendingWritesToRetry = [...this._pendingWrites]; // copy array; + let pw = pendingWritesToRetry.pop(); + while (pw) { + this._pendingWrites.pop(); // remove from real queue + this.send(pw); // .send immediately adds to the queue + pw = pendingWritesToRetry.pop(); + } + } + + private ackAllPendingWrites( + err: Error | null, + result?: + | protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse + | undefined + ) { + while (this.hasPendingWrites()) { + this.ackNextPendingWrite(err, result); + } + } + private ackNextPendingWrite( err: Error | null, result?: @@ -253,6 +294,7 @@ export class StreamConnection extends EventEmitter { ) { const pw = this._pendingWrites.pop(); if (pw) { + this.trace('ack pending write:', pw, err, result); pw._markDone(err, result); } } @@ -279,23 +321,27 @@ export class StreamConnection extends EventEmitter { } private send(pw: PendingWrite) { - const request = pw.getRequest(); - if (!this._connection) { - pw._markDone(new Error('connection closed')); + const retrySettings = this._writeClient._retrySettings; + const tries = pw._increaseAttempts(); + if (tries > retrySettings.maxRetryAttempts) { + pw._markDone( + new Error(`pending write max retries reached: ${tries} attempts`) + ); return; } - if (this._connection.destroyed || this._connection.closed) { + if (this.isConnectionClosed()) { this.reconnect(); } this.trace('sending pending write', pw); try { - this._connection.write(request, err => { + const request = pw.getRequest(); + this._pendingWrites.unshift(pw); + this._connection?.write(request, err => { this.trace('wrote pending write', err, 
this._pendingWrites.length); if (err) { pw._markDone(err); //TODO: add retries return; } - this._pendingWrites.unshift(pw); }); } catch (err) { pw._markDone(err as Error); @@ -306,14 +352,16 @@ export class StreamConnection extends EventEmitter { * Check if connection is open and ready to send requests. */ isOpen(): boolean { - return !!this._connection; + return !this.isConnectionClosed(); } /** - * Reconnect and re send inflight requests. + * Re open appendRows BiDi gRPC connection. */ reconnect() { - this.trace('reconnect called'); + this.trace( + `reconnect called with ${this._pendingWrites.length} pending writes` + ); this.close(); this.open(); } @@ -347,7 +395,6 @@ export class StreamConnection extends EventEmitter { async flushRows(request?: { offset?: IInt64Value['value']; }): Promise { - this.close(); if (this.isDefaultStream()) { return null; } diff --git a/handwritten/bigquery-storage/src/managedwriter/writer.ts b/handwritten/bigquery-storage/src/managedwriter/writer.ts index 04939e0792a..3902eb09533 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer.ts @@ -160,7 +160,7 @@ export class Writer { offsetValue?: IInt64Value['value'] ): PendingWrite { let offset: AppendRowRequest['offset']; - if (offsetValue) { + if (offsetValue !== undefined && offsetValue !== null) { offset = { value: offsetValue, }; diff --git a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts index 55721ab66ca..9b05dfaf403 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts @@ -23,6 +23,10 @@ import {StreamConnection} from './stream_connection'; type StreamConnections = { connectionList: StreamConnection[]; }; +type RetrySettings = { + enableWriteRetries: boolean; + maxRetryAttempts: number; +}; type CreateWriteStreamRequest = 
protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest; type BatchCommitWriteStreamsRequest = @@ -55,6 +59,12 @@ export class WriterClient { private _client: BigQueryWriteClient; private _connections: StreamConnections; private _open: boolean; + /** + * Retry settings, only internal for now. + * @private + * @internal + */ + _retrySettings: RetrySettings; constructor(opts?: ClientOptions) { const baseOptions = { @@ -69,6 +79,10 @@ export class WriterClient { connectionList: [], }; this._open = false; + this._retrySettings = { + enableWriteRetries: false, + maxRetryAttempts: 4, + }; } /** @@ -102,6 +116,29 @@ export class WriterClient { return this._open; } + /** + * Enables StreamConnections to automatically retry failed appends. + * + * Enabling retries is best suited for cases where users want to achieve at-least-once + * append semantics. Use of automatic retries may complicate patterns where the user + * is designing for exactly-once append semantics. + */ + enableWriteRetries(enable: boolean) { + this._retrySettings.enableWriteRetries = enable; + } + + /** + * Change max retries attempts on child StreamConnections. + * + * The default valuen is to retry 4 times. + * + * Only valid right now when write retries are enabled. + * @see enableWriteRetries. + */ + setMaxRetryAttempts(retryAttempts: number) { + this._retrySettings.maxRetryAttempts = retryAttempts; + } + /** * Creates a write stream to the given table. * Additionally, every table has a special stream named DefaultStream diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index d205d547b21..e66e9693403 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -13,7 +13,7 @@ // limitations under the License. 
import * as assert from 'assert'; -import {describe, it, xit} from 'mocha'; +import {describe, it} from 'mocha'; import * as uuid from 'uuid'; import * as gax from 'google-gax'; import * as sinon from 'sinon'; @@ -24,6 +24,7 @@ import * as protobuf from 'protobufjs'; import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; +import {PendingWrite} from '../src/managedwriter/pending_write'; const sandbox = sinon.createSandbox(); afterEach(() => sandbox.restore()); @@ -41,6 +42,8 @@ type DescriptorProto = protos.google.protobuf.IDescriptorProto; type IInt64Value = protos.google.protobuf.IInt64Value; type AppendRowsResponse = protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse; +type AppendRowRequest = + protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; const FieldDescriptorProtoType = protos.google.protobuf.FieldDescriptorProto.Type; @@ -51,6 +54,11 @@ const generateUuid = () => `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); const datasetId = generateUuid(); +const sleep = (ms: number) => + new Promise(resolve => { + setTimeout(resolve, ms); + }); + const root = protobuf.Root.fromJSON(customerRecordProtoJson); if (!root) { throw Error('Proto must not be undefined'); @@ -857,6 +865,280 @@ describe('managedwriter.WriterClient', () => { } }); + describe('Flaky Scenarios', () => { + let flakyDatasetId: string; + const flakyRegion = 'us-east7'; + + let rowNum = 0; + const generateRows = (num: number) => { + const rows = []; + for (let i = 0; i < num; i++) { + rows.push({ + customer_name: generateUuid(), + row_num: rowNum++, + }); + } + return rows; + }; + + beforeEach(() => { + rowNum = 0; + }); + + before(async () => { + flakyDatasetId = generateUuid(); + await bigquery.createDataset(flakyDatasetId, { + location: flakyRegion, + }); + }); + + after(async () => { + await bigquery + .dataset(flakyDatasetId) + .delete({force: 
true}) + .catch(console.warn); + }); + + describe('should manage to send data in sequence scenario', () => { + it('every 10 request drops the connection', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.enableWriteRetries(true); + client.setClient(bqWriteClient); + + try { + const flakyTableId = generateUuid() + '_reconnect_on_close'; + const [table] = await bigquery + .dataset(flakyDatasetId) + .createTable(flakyTableId, { + schema, + location: flakyRegion, + }); + projectId = table.metadata.tableReference.projectId; + parent = `projects/${projectId}/datasets/${flakyDatasetId}/tables/${flakyTableId}`; + + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const iterations = new Array(50).fill(1); + let offset = 0; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for (const _ of iterations) { + const rows = generateRows(10); + const pw = writer.appendRows(rows, offset); + try { + await pw.getResult(); + } catch (err) { + console.error('found error trying to send rows'); + } + offset += 10; + } + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 500); + + writer.close(); + } finally { + client.close(); + } + }).timeout(2 * 60 * 1000); + + it('opening the connection can fail more frequently', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.enableWriteRetries(true); + client.setMaxRetryAttempts(100); // aggresive retries + client.setClient(bqWriteClient); + + try { + const flakyTableId = generateUuid() + '_initial_connect_failure'; + const [table] = await bigquery + .dataset(flakyDatasetId) + .createTable(flakyTableId, { + schema, + location: flakyRegion, + }); + projectId = table.metadata.tableReference.projectId; + parent = 
`projects/${projectId}/datasets/${flakyDatasetId}/tables/${flakyTableId}`; + + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const iterations = new Array(50).fill(1); + let offset = 0; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for (const _ of iterations) { + const rows = generateRows(10); + const pw = writer.appendRows(rows, offset); + try { + const res = await pw.getResult(); + assert.equal(res.error, null); + } catch (err) { + console.error('found error trying to send rows', err); + throw err; + } + offset += 10; + connection.close(); // Close connection on every append to trigger reconnection + } + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 500); + + writer.close(); + } finally { + client.close(); + } + }).timeout(2 * 60 * 1000); + }); + + describe('should manage to send data in parallel', () => { + it('every 10 request drops the connection', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.enableWriteRetries(true); + client.setMaxRetryAttempts(10); + client.setClient(bqWriteClient); + + try { + const flakyTableId = generateUuid() + '_reconnect_on_close'; + const [table] = await bigquery + .dataset(flakyDatasetId) + .createTable(flakyTableId, { + schema, + location: flakyRegion, + }); + projectId = table.metadata.tableReference.projectId; + parent = `projects/${projectId}/datasets/${flakyDatasetId}/tables/${flakyTableId}`; + + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const pendingWrites: PendingWrite[] = []; + const iterations = new Array(50).fill(1); + let offset = 0; + // eslint-disable-next-line 
@typescript-eslint/no-unused-vars + for (const _ of iterations) { + const rows = generateRows(10); + const pw = writer.appendRows(rows, offset); + pendingWrites.push(pw); + offset += 10; + } + + await Promise.all(pendingWrites.map(pw => pw.getResult())); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 500); + + writer.close(); + } finally { + client.close(); + } + }).timeout(2 * 60 * 1000); + + it('every 10 request there is a in stream INTERNAL error', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.enableWriteRetries(true); + client.setClient(bqWriteClient); + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + let numCalls = 0; + let numSucess = 0; + const conn = connection['_connection'] as gax.CancellableStream; + sandbox + .stub(conn, 'write') + .callsFake( + ( + chunk: unknown, + cb?: ((error: Error | null | undefined) => void) | undefined + ): boolean => { + const req = chunk as AppendRowRequest; + cb && cb(null); + numCalls++; + if (!req.writeStream) { + return false; + } + if (numCalls % 10 === 0) { + const res: AppendRowsResponse = { + writeStream: req.writeStream, + error: { + code: gax.Status.INTERNAL, + message: 'internal error', + }, + }; + conn?.emit('data', res); + } else { + const res: AppendRowsResponse = { + writeStream: req.writeStream, + appendResult: { + offset: req.offset, + }, + }; + conn?.emit('data', res); + numSucess++; + } + return false; + } + ); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const pendingWrites: PendingWrite[] = []; + const iterations = new Array(50).fill(1); + let offset = 0; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for (const _ of iterations) { + const rows = generateRows(10); + const pw = writer.appendRows(rows, offset); + pendingWrites.push(pw); + offset += 10; + } + + await 
Promise.all(pendingWrites.map(pw => pw.getResult())); + + connection.close(); + assert.equal(numSucess, 50); + + writer.close(); + } finally { + client.close(); + } + }).timeout(2 * 60 * 1000); + }); + }); + describe('Error Scenarios', () => { it('send request with mismatched proto descriptor', async () => { bqWriteClient.initialize(); @@ -1083,9 +1365,10 @@ describe('managedwriter.WriterClient', () => { } }); - it('should trigger reconnection given some specific errors', async () => { + it('should trigger reconnection when connection closes and there are pending writes', async () => { bqWriteClient.initialize(); const client = new WriterClient(); + client.enableWriteRetries(true); client.setClient(bqWriteClient); const connection = await client.createStreamConnection({ @@ -1120,51 +1403,21 @@ describe('managedwriter.WriterClient', () => { ); await pw.getResult(); - const reconnectErrorCases: gax.GoogleError[] = [ - { - code: gax.Status.ABORTED, - msg: 'Closing the stream because it has been inactive', - }, - { - code: gax.Status.RESOURCE_EXHAUSTED, - msg: 'read econnreset', - }, - { - code: gax.Status.ABORTED, - msg: 'service is currently unavailable', - }, - { - code: gax.Status.RESOURCE_EXHAUSTED, - msg: 'bandwidth exhausted', - }, - { - code: gax.Status.RESOURCE_EXHAUSTED, - msg: 'memory limit exceeded', - }, - { - code: gax.Status.CANCELLED, - msg: 'any', - }, - { - code: gax.Status.DEADLINE_EXCEEDED, - msg: 'a msg', - }, - { - code: gax.Status.INTERNAL, - msg: 'received RST_STREAM with code', - }, - ].map(err => { - const gerr = new gax.GoogleError(err.msg); - gerr.code = err.code; - return gerr; - }); - for (const gerr of reconnectErrorCases) { - const conn = connection['_connection'] as gax.CancellableStream; // private method - conn.emit('error', gerr); - assert.equal(reconnectedCalled, true); + const conn = connection['_connection'] as gax.CancellableStream; // private method - reconnectedCalled = false; // reset flag - } + const gerr = new 
gax.GoogleError('aborted'); + gerr.code = gax.Status.ABORTED; + conn.emit('error', gerr); + conn.emit('close'); + + assert.equal(reconnectedCalled, false); + + // add a fake pending write + connection['_pendingWrites'].push(new PendingWrite({})); + conn.emit('error', gerr); + conn.emit('close'); + + assert.equal(reconnectedCalled, true); writer.close(); } finally { @@ -1172,7 +1425,7 @@ describe('managedwriter.WriterClient', () => { } }); - xit('reconnect on idle connection', async () => { + it('reconnect on idle connection', async () => { bqWriteClient.initialize(); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1195,10 +1448,6 @@ describe('managedwriter.WriterClient', () => { destinationTable: parent, }); - connection.onConnectionError(err => { - console.log('idle conn err', err); - }); - const writer = new JSONWriter({ connection, protoDescriptor, @@ -1207,15 +1456,17 @@ describe('managedwriter.WriterClient', () => { let pw = writer.appendRows([row1, row2], 0); await pw.getResult(); - const sleep = (ms: number) => - new Promise(resolve => { - setTimeout(resolve, ms); - }); - const minutes = 10; - for (let i = 0; i <= minutes; i++) { - console.log('sleeping for a minute: ', minutes - i, 'to go'); - await sleep(60 * 1000); - } + // Simulate server sending ABORT error as the conn was idle + const conn = connection['_connection'] as gax.CancellableStream; // private method + const gerr = new gax.GoogleError( + 'Closing the stream because it has been inactive for 600 seconds' + ); + gerr.code = gax.Status.ABORTED; + conn.emit('error', gerr); + // simulate server closing conn. 
+ await sleep(100); + conn.destroy(); + await sleep(100); const row3 = { customer_name: 'Test', @@ -1234,7 +1485,68 @@ describe('managedwriter.WriterClient', () => { } finally { client.close(); } - }).timeout(20 * 60 * 1000); + }).timeout(20 * 1000); + + it('should mark any pending writes with error if connection was closed', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; + + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const pw1 = writer.appendRows([row1], 0); + await pw1.getResult(); + + // Try to append a new row + const row2 = { + customer_name: 'Test', + row_num: 2, + customer_email: 'test@example.com', + }; + + let foundError: gax.GoogleError | null = null; + const pw2 = writer.appendRows([row2], 1); + pw2.getResult().catch(err => { + foundError = err as gax.GoogleError; + }); + + // Simulate server sending ABORTED error on a write + const conn = connection['_connection'] as gax.CancellableStream; // private method + // swallow ack for the last appendRow call, so we can simulate it failing + conn.removeAllListeners('data'); + await new Promise(resolve => conn.once('data', resolve)); + conn.addListener('data', connection['handleData']); + + // simulate server closing conn. 
+ conn.emit('close'); + await sleep(100); + conn.destroy(); + await sleep(100); + + // should throw error of reconnection + assert.notEqual(foundError, null); + assert.equal(foundError!.message.includes('retry'), true); + + connection.close(); + writer.close(); + } finally { + client.close(); + } + }); }); describe('close', () => { @@ -1310,7 +1622,6 @@ describe('managedwriter.WriterClient', () => { for (const dataset of datasets) { const [metadata] = await dataset.getMetadata(); const creationTime = Number(metadata.creationTime); - if (isResourceStale(creationTime)) { try { await dataset.delete({force: true}); From 7e755b2d8a1d29a4d91d978605774c372ffc430f Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Fri, 3 May 2024 08:04:51 -0700 Subject: [PATCH 269/333] feat: add support for RANGE type with Write API and adapt pkg (#437) --- .../bigquery-storage/src/adapt/proto.ts | 37 +++++++++-- .../src/adapt/proto_mappings.ts | 4 +- .../bigquery-storage/src/adapt/schema.ts | 23 +++++++ .../src/adapt/schema_mappings.ts | 1 + .../bigquery-storage/test/adapt/proto.ts | 58 +++++++++++++++++ .../bigquery-storage/test/adapt/schema.ts | 65 +++++++++++++++++++ 6 files changed, 181 insertions(+), 7 deletions(-) diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 1a1ed992894..532def90484 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -92,10 +92,34 @@ function convertStorageSchemaToFileDescriptorInternal( for (const field of schema.fields ?? 
[]) { fNumber += 1; const currentScope = `${scope}_${field.name}`; - if (field.type === TableFieldSchema.Type.STRUCT) { - const subSchema: TableSchema = { - fields: field.fields, - }; + if ( + field.type === TableFieldSchema.Type.STRUCT || + field.type === TableFieldSchema.Type.RANGE + ) { + let subSchema: TableSchema = {}; + switch (field.type) { + case TableFieldSchema.Type.STRUCT: + subSchema = { + fields: field.fields, + }; + break; + case TableFieldSchema.Type.RANGE: + subSchema = { + fields: [ + { + name: 'start', + type: field.rangeElementType?.type, + mode: 'NULLABLE', + }, + { + name: 'end', + type: field.rangeElementType?.type, + mode: 'NULLABLE', + }, + ], + }; + } + const fd = convertStorageSchemaToFileDescriptorInternal( subSchema, currentScope, @@ -227,7 +251,10 @@ function convertTableFieldSchemaToFieldDescriptorProto( } const label = convertModeToLabel(field.mode, useProto3); let fdp: FieldDescriptorProto; - if (type === TableFieldSchema.Type.STRUCT) { + if ( + type === TableFieldSchema.Type.STRUCT || + type === TableFieldSchema.Type.RANGE + ) { fdp = new FieldDescriptorProto({ name: name, number: fNumber, diff --git a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts index 30b87d8524c..28a7f4da870 100644 --- a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts @@ -67,8 +67,8 @@ export const bqTypeToFieldTypeMap: Record< TYPE_UNSPECIFIED: null, [TableFieldSchema.Type.INTERVAL]: null, INTERVAL: null, - [TableFieldSchema.Type.RANGE]: null, - RANGE: null, + [TableFieldSchema.Type.RANGE]: FieldDescriptorProto.Type.TYPE_MESSAGE, + RANGE: FieldDescriptorProto.Type.TYPE_MESSAGE, }; export const bqModeToFieldLabelMapProto2: Record< diff --git a/handwritten/bigquery-storage/src/adapt/schema.ts b/handwritten/bigquery-storage/src/adapt/schema.ts index cd276aa2365..d2bdf3ea8f2 100644 --- 
a/handwritten/bigquery-storage/src/adapt/schema.ts +++ b/handwritten/bigquery-storage/src/adapt/schema.ts @@ -42,6 +42,16 @@ type ITableFieldSchema = { * [Required] The field data type. Possible values include STRING, BYTES, INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), NUMERIC, BIGNUMERIC, BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, INTERVAL, RECORD (where RECORD indicates that the field contains a nested schema) or STRUCT (same as RECORD). */ type?: string; + + /** + * Represents the type of a field element. + */ + rangeElementType?: { + /** + * Required. The type of a field element. For more information, see TableFieldSchema.type. + */ + type?: string; + }; }; type StorageTableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; type StorageTableField = @@ -109,5 +119,18 @@ function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { } out.fields.push(converted); } + + if (field.rangeElementType && field.rangeElementType.type) { + const rtype = fieldTypeMap[field.rangeElementType.type]; + if (!rtype) { + throw Error( + `could not convert range field (${field.name}) due to unknown range element type: ${field.rangeElementType.type}` + ); + } + out.rangeElementType = { + type: rtype, + }; + } + return out; } diff --git a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts index 040d631c666..49bd95950e5 100644 --- a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts @@ -44,6 +44,7 @@ export const fieldTypeMap: Record = { protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.DATETIME, INTERVAL: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.INTERVAL, + RANGE: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.RANGE, RECORD: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.STRUCT, STRUCT: 
protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.STRUCT, JSON: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.JSON, diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index d350e440f1b..6d73e08b240 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -207,6 +207,64 @@ describe('Adapt Protos', () => { const decoded = NestedProto.decode(serialized).toJSON(); assert.deepEqual(raw, decoded); }); + + it('range', () => { + const schema = { + fields: [ + { + name: 'range_ts', + type: 'RANGE', + rangeElementType: { + type: 'TIMESTAMP', + }, + }, + { + name: 'range_dt', + type: 'RANGE', + rangeElementType: { + type: 'DATETIME', + }, + }, + { + name: 'range_d', + type: 'RANGE', + rangeElementType: { + type: 'DATE', + }, + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Test' + ); + assert.notEqual(protoDescriptor, null); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + const TestProto = Type.fromDescriptor(protoDescriptor); + const raw = { + range_dt: { + start: '2024-04-05T15:45:58.981Z', + end: '2024-04-05T16:45:58.981Z', + }, + // The value is the number of days since the Unix epoch (1970-01-01) + range_d: { + start: new Date('2024-04-01').getTime() / (1000 * 60 * 60 * 24), + end: new Date('2024-04-05').getTime() / (1000 * 60 * 60 * 24), + }, + // The value is given in microseconds since the Unix epoch (1970-01-01) + range_ts: { + start: new Date('2024-04-05T15:45:58.981Z').getTime() * 1000, + end: new Date('2024-04-05T16:45:58.981Z').getTime() * 1000, + }, + }; + const serialized = TestProto.encode(raw).finish(); + const decoded = TestProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); }); describe('Proto descriptor 
normalization', () => { diff --git a/handwritten/bigquery-storage/test/adapt/schema.ts b/handwritten/bigquery-storage/test/adapt/schema.ts index 6c3568ccf45..46abf7f4598 100644 --- a/handwritten/bigquery-storage/test/adapt/schema.ts +++ b/handwritten/bigquery-storage/test/adapt/schema.ts @@ -58,6 +58,7 @@ describe('Adapt Schemas', () => { ], }); }); + it('arrays', () => { const schema = { fields: [ @@ -98,6 +99,7 @@ describe('Adapt Schemas', () => { ], }); }); + it('nested structs', () => { const schema = { fields: [ @@ -152,5 +154,68 @@ describe('Adapt Schemas', () => { ], }); }); + + it('range', () => { + const schema = { + fields: [ + { + name: 'range_ts', + type: 'RANGE', + rangeElementType: { + type: 'TIMESTAMP', + }, + }, + { + name: 'range_dt', + type: 'RANGE', + rangeElementType: { + type: 'DATETIME', + }, + }, + { + name: 'range_d', + type: 'RANGE', + rangeElementType: { + type: 'DATE', + }, + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + assert.notEqual(storageSchema, null); + if (!storageSchema) { + throw Error('null storage schema'); + } + console.log('storageSchema', storageSchema); + assert.deepEqual(storageSchema, { + fields: [ + { + name: 'range_ts', + type: TableFieldSchema.Type.RANGE, + mode: TableFieldSchema.Mode.NULLABLE, + rangeElementType: { + type: TableFieldSchema.Type.TIMESTAMP, + }, + }, + { + name: 'range_dt', + type: TableFieldSchema.Type.RANGE, + mode: TableFieldSchema.Mode.NULLABLE, + rangeElementType: { + type: TableFieldSchema.Type.DATETIME, + }, + }, + { + name: 'range_d', + type: TableFieldSchema.Type.RANGE, + mode: TableFieldSchema.Mode.NULLABLE, + rangeElementType: { + type: TableFieldSchema.Type.DATE, + }, + }, + ], + }); + }); }); }); From b100ad5d8b4a4b8d73ce91c4ebe32daa7d6bff30 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 3 May 2024 10:00:56 -0700 Subject: [PATCH 270/333] chore(main): release 4.6.0 
(#448) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.6.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- ...snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 13 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 1dd410ef4ee..ccfd4fd174a 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [4.6.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.5.0...v4.6.0) (2024-05-03) + + +### Features + +* Add support for RANGE type with Write API and adapt pkg ([#437](https://github.com/googleapis/nodejs-bigquery-storage/issues/437)) ([51924ab](https://github.com/googleapis/nodejs-bigquery-storage/commit/51924ab3639253ff79654d90fbada368f5fd5e18)) +* Enable write retry and nack pending writes on reconnect ([#443](https://github.com/googleapis/nodejs-bigquery-storage/issues/443)) ([ce4f88c](https://github.com/googleapis/nodejs-bigquery-storage/commit/ce4f88c668afb8ebf1d5b7ad57f1c4e245a1a8f8)) + ## [4.5.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.4.0...v4.5.0) (2024-04-15) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7bf2f1abdca..7144b268765 100644 --- a/handwritten/bigquery-storage/package.json +++ 
b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.5.0", + "version": "4.6.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 72b191d5959..9ecb45d5cd3 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.5.0", + "version": "4.6.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index cfb06ecd460..55aac41c7b5 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.5.0", + "version": "4.6.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 35ee1befda1..d6bc2340c79 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.5.0", + "version": "4.6.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 35ee1befda1..d6bc2340c79 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.5.0", + "version": "4.6.0", "language": "TYPESCRIPT", "apis": [ { From f21a45d352c225a4d71ca8f952795b93f938b7e5 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Fri, 3 May 2024 11:12:06 -0700 Subject: [PATCH 271/333] feat: add trace id (#447) --- .../src/managedwriter/writer.ts | 19 ++++++ .../system-test/managed_writer_client_test.ts | 67 ++++++++++++++++++- 2 files changed, 84 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/writer.ts b/handwritten/bigquery-storage/src/managedwriter/writer.ts index 3902eb09533..05c9ce767da 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer.ts @@ -16,6 +16,7 @@ import {isDeepStrictEqual} from 'util'; import * as protos from '../../protos/protos'; import {PendingWrite} from './pending_write'; import {StreamConnection} from './stream_connection'; +const version = require('../../../package.json').version; type AppendRowRequest = protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; @@ -76,6 +77,12 @@ export interface WriterOptions { * field path like 
'foo.bar'. */ missingValueInterpretations?: MissingValueInterpretationMap; + + /** + * Trace ID allows instruments requests to the service with a custom trace prefix. + * This is generally for diagnostic purposes only. + */ + traceId?: string; } /** @@ -89,6 +96,7 @@ export class Writer { private _protoDescriptor: DescriptorProto; private _streamConnection: StreamConnection; private _defaultMissingValueInterpretation?: MissingValueInterpretation; + private _traceId?: string; private _missingValueInterpretations?: MissingValueInterpretationMap; /** @@ -99,17 +107,27 @@ export class Writer { */ constructor(params: WriterOptions) { const { + traceId, connection, protoDescriptor, missingValueInterpretations, defaultMissingValueInterpretation, } = params; + this._traceId = traceId; this._streamConnection = connection; this._protoDescriptor = new DescriptorProto(protoDescriptor); this._defaultMissingValueInterpretation = defaultMissingValueInterpretation; this._missingValueInterpretations = missingValueInterpretations; } + traceId(): string { + const base = `nodejs-writer:${version}`; + if (this._traceId) { + return `${base} ${this._traceId}`; + } + return base; + } + /** * Update the proto descriptor for the Writer. 
* Internally a reconnection event is gonna happen to apply @@ -167,6 +185,7 @@ export class Writer { } const request: AppendRowRequest = { writeStream: this._streamConnection.getStreamId(), + traceId: this.traceId(), protoRows: { rows, writerSchema: { diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index e66e9693403..f7f12c5d029 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -25,6 +25,7 @@ import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; +import * as pkg from '../package.json'; const sandbox = sinon.createSandbox(); afterEach(() => sandbox.restore()); @@ -42,7 +43,7 @@ type DescriptorProto = protos.google.protobuf.IDescriptorProto; type IInt64Value = protos.google.protobuf.IInt64Value; type AppendRowsResponse = protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse; -type AppendRowRequest = +type AppendRowsRequest = protos.google.cloud.bigquery.storage.v1.IAppendRowsRequest; const FieldDescriptorProtoType = @@ -377,6 +378,68 @@ describe('managedwriter.WriterClient', () => { }); }); + describe('StreamConnection', () => { + it('should pass traceId on AppendRequests', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + // Row 1 + const row1 = { + customer_name: 'Lovelace', + row_num: 1, + }; + + // Row 2 + const row2 = { + customer_name: 'Turing', + row_num: 2, + }; + + try { + const connection = await client.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable: parent, + }); + let writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + let 
pw1 = await writer.appendRows([row1, row2]); + let pw2 = await writer.appendRows([row1, row2]); + await Promise.all([pw1.getResult(), pw2.getResult()]); + + let requests = [pw1.getRequest(), pw2.getRequest()]; + requests.every(req => { + assert.notEqual(req.traceId, null); + assert.strictEqual(req.traceId, `nodejs-writer:${pkg.version}`); + }); + + writer = new JSONWriter({ + traceId: 'foo', + connection, + protoDescriptor, + }); + + pw1 = await writer.appendRows([row1, row2]); + pw2 = await writer.appendRows([row1, row2]); + await Promise.all([pw1.getResult(), pw2.getResult()]); + + requests = [pw1.getRequest(), pw2.getRequest()]; + requests.every(req => { + assert.notEqual(req.traceId, null); + assert.strictEqual(req.traceId, `nodejs-writer:${pkg.version} foo`); + }); + + writer.close(); + client.close(); + } finally { + client.close(); + } + }); + }); + describe('JSONEncoder', () => { it('should automatically convert date/datetime/timestamps to expect BigQuery format', () => { const updatedSchema: TableSchema = { @@ -1081,7 +1144,7 @@ describe('managedwriter.WriterClient', () => { chunk: unknown, cb?: ((error: Error | null | undefined) => void) | undefined ): boolean => { - const req = chunk as AppendRowRequest; + const req = chunk as AppendRowsRequest; cb && cb(null); numCalls++; if (!req.writeStream) { From c8a26092a9c056517db84a4043730a9546f732c3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 08:21:58 -0400 Subject: [PATCH 272/333] chore(main): release 4.7.0 (#451) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.7.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- 
handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...pet_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index ccfd4fd174a..2a75024795e 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.7.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.6.0...v4.7.0) (2024-05-03) + + +### Features + +* Add trace id ([#447](https://github.com/googleapis/nodejs-bigquery-storage/issues/447)) ([19b38a0](https://github.com/googleapis/nodejs-bigquery-storage/commit/19b38a009947f93eb06fd3b127bc23ee3dee2594)) + ## [4.6.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.5.0...v4.6.0) (2024-05-03) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7144b268765..e963bbdffd0 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.6.0", + "version": "4.7.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 9ecb45d5cd3..3bd4fd70819 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.6.0", + "version": "4.7.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 55aac41c7b5..4bb04f7f44c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.6.0", + "version": "4.7.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index d6bc2340c79..4002d9dc4e7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.6.0", + "version": "4.7.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index d6bc2340c79..4002d9dc4e7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.6.0", + "version": "4.7.0", "language": "TYPESCRIPT", "apis": [ { From 010ebffc2b6792b63f431987f2ce605ca583ebc5 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 8 May 2024 09:35:45 -0700 Subject: [PATCH 273/333] feat: add getWriteStream and createWriteStreamFullResponse methods (#453) --- .../bigquery-storage/src/adapt/proto.ts | 10 +- .../src/adapt/schema_mappings.ts | 13 +++ .../src/managedwriter/writer_client.ts | 103 +++++++++++++++--- .../bigquery-storage/test/adapt/proto.ts | 60 ++++++++++ .../bigquery-storage/test/adapt/schema.ts | 1 - 5 files changed, 167 insertions(+), 20 deletions(-) diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 532def90484..870ede032be 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -14,6 +14,7 @@ import * as protos from '../../protos/protos'; import {bqTypeToFieldTypeMap, convertModeToLabel} from './proto_mappings'; +import {normalizeFieldType} from './schema_mappings'; type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; type TableFieldSchema = @@ -92,12 +93,13 @@ function convertStorageSchemaToFileDescriptorInternal( for (const field of schema.fields ?? 
[]) { fNumber += 1; const currentScope = `${scope}_${field.name}`; + const normalizedType = normalizeFieldType(field); if ( - field.type === TableFieldSchema.Type.STRUCT || - field.type === TableFieldSchema.Type.RANGE + normalizedType === TableFieldSchema.Type.STRUCT || + normalizedType === TableFieldSchema.Type.RANGE ) { let subSchema: TableSchema = {}; - switch (field.type) { + switch (normalizedType) { case TableFieldSchema.Type.STRUCT: subSchema = { fields: field.fields, @@ -245,7 +247,7 @@ function convertTableFieldSchemaToFieldDescriptorProto( useProto3: boolean ): FieldDescriptorProto { const name = field.name; - const type = field.type; + const type = normalizeFieldType(field); if (!type) { throw Error(`table field ${name} missing type`); } diff --git a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts index 49bd95950e5..87c5e13b122 100644 --- a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts @@ -52,6 +52,19 @@ export const fieldTypeMap: Record = { protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Type.GEOGRAPHY, }; +export function normalizeFieldType( + field: StorageTableField +): StorageTableField['type'] { + if (field.type) { + const ftype = fieldTypeMap[field.type]; + if (!ftype) { + return field.type; + } + return ftype; + } + return field.type; +} + export const modeMap: Record = { NULLABLE: protos.google.cloud.bigquery.storage.v1.TableFieldSchema.Mode.NULLABLE, diff --git a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts index 9b05dfaf403..0bbf7d090b0 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts @@ -17,7 +17,12 @@ import type {CallOptions, ClientOptions} from 'google-gax'; import * as protos from '../../protos/protos'; 
import {BigQueryWriteClient} from '../v1'; -import {WriteStreamType, DefaultStream, streamTypeToEnum} from './stream_types'; +import { + WriteStreamType, + DefaultStream, + streamTypeToEnum, + WriteStream, +} from './stream_types'; import {StreamConnection} from './stream_connection'; type StreamConnections = { @@ -29,6 +34,9 @@ type RetrySettings = { }; type CreateWriteStreamRequest = protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest; +type GetWriteStreamRequest = + protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest; +type WriteStreamView = protos.google.cloud.bigquery.storage.v1.WriteStreamView; type BatchCommitWriteStreamsRequest = protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest; type BatchCommitWriteStreamsResponse = @@ -140,7 +148,9 @@ export class WriterClient { } /** - * Creates a write stream to the given table. + * Creates a write stream to the given table and return just the + * streamId. + * * Additionally, every table has a special stream named DefaultStream * to which data can be written. This stream doesn't need to be created using * createWriteStream. It is a stream that can be used simultaneously by any @@ -156,10 +166,46 @@ export class WriterClient { * of `projects/{project}/datasets/{dataset}/tables/{table}`. * @returns {Promise}} - The promise which resolves to the streamId. */ - async createWriteStream(request: { - streamType: WriteStreamType; - destinationTable: string; - }): Promise { + async createWriteStream( + request: { + streamType: WriteStreamType; + destinationTable: string; + }, + options?: CallOptions + ): Promise { + const stream = await this.createWriteStreamFullResponse(request, options); + if (stream.name) { + return stream.name; + } + return ''; + } + + /** + * Creates a write stream to the given table and return all + * information about it. + * + * Additionally, every table has a special stream named DefaultStream + * to which data can be written. 
This stream doesn't need to be created using + * createWriteStream. It is a stream that can be used simultaneously by any + * number of clients. Data written to this stream is considered committed as + * soon as an acknowledgement is received. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.streamType + * Required. The type of stream to create. + * @param {string} request.destinationTable + * Required. Reference to the table to which the stream belongs, in the format + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @returns {Promise}} - The promise which resolves to the WriteStream. + */ + async createWriteStreamFullResponse( + request: { + streamType: WriteStreamType; + destinationTable: string; + }, + options?: CallOptions + ): Promise { await this.initialize(); const {streamType, destinationTable} = request; const createReq: CreateWriteStreamRequest = { @@ -168,19 +214,46 @@ export class WriterClient { type: streamTypeToEnum(streamType), }, }; - const [response] = await this._client.createWriteStream(createReq); + const [response] = await this._client.createWriteStream(createReq, options); if (typeof [response] === undefined) { throw new gax.GoogleError(`${response}`); } - try { - if (response.name) { - const streamId = response.name; - return streamId; - } - return ''; - } catch { - throw new Error('Stream connection failed'); + return response; + } + + /** + * Gets information about a write stream. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.streamId + * Required. Name of the stream to get, in the form of + * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}` + * @param {WriteStreamView} request.view + * Indicates whether to get full or partial view of the WriteStream. If + * not set, view returned will be basic. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise}} - The promise which resolves to the WriteStream. + */ + async getWriteStream( + request: { + streamId: string; + view?: WriteStreamView; + }, + options?: CallOptions + ): Promise { + await this.initialize(); + const {streamId, view} = request; + const getReq: GetWriteStreamRequest = { + name: streamId, + view, + }; + const [response] = await this._client.getWriteStream(getReq, options); + if (typeof [response] === undefined) { + throw new gax.GoogleError(`${response}`); } + return response; } /** diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 6d73e08b240..6973b06b9cf 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -19,6 +19,10 @@ import * as adapt from '../../src/adapt'; import * as messagesJSON from '../../samples/testdata/messages.json'; import * as protos from '../../protos/protos'; +type TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.ITableFieldSchema; +const TableFieldSchema = + protos.google.cloud.bigquery.storage.v1.TableFieldSchema; const DescriptorProto = protos.google.protobuf.DescriptorProto; const {Root, Type} = protobuf; @@ -265,6 +269,62 @@ describe('Adapt Protos', () => { const decoded = TestProto.decode(serialized).toJSON(); assert.deepEqual(raw, decoded); }); + + it('convert both string and numeric value of table schema field type', () => { + const schema: TableFieldSchema = { + fields: [ + { + name: 'rowNum', + type: TableFieldSchema.Type.NUMERIC, + mode: 'NULLABLE', + description: '', + }, + { + name: 'range', + type: 'RANGE', + mode: 'NULLABLE', + description: '', + rangeElementType: { + type: 'TIMESTAMP', + }, + }, + { + name: 'nested', + type: TableFieldSchema.Type.STRUCT, + mode: 'REQUIRED', + description: '', + fields: [ + { + name: 'integer', + 
mode: 'REQUIRED', + description: '', + type: TableFieldSchema.Type.INT64, + }, + ], + }, + ], + }; + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + schema, + 'root' + ); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + const TestProto = Type.fromDescriptor(protoDescriptor); + const raw = { + rowNum: '1', + range: { + start: new Date('2024-04-05T15:45:58.981Z').getTime() * 1000, + }, + nested: { + integer: 10, + }, + }; + const serialized = TestProto.encode(raw).finish(); + const decoded = TestProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); }); describe('Proto descriptor normalization', () => { diff --git a/handwritten/bigquery-storage/test/adapt/schema.ts b/handwritten/bigquery-storage/test/adapt/schema.ts index 46abf7f4598..52560372fc3 100644 --- a/handwritten/bigquery-storage/test/adapt/schema.ts +++ b/handwritten/bigquery-storage/test/adapt/schema.ts @@ -187,7 +187,6 @@ describe('Adapt Schemas', () => { if (!storageSchema) { throw Error('null storage schema'); } - console.log('storageSchema', storageSchema); assert.deepEqual(storageSchema, { fields: [ { From 1b1ffa98e5c20b6a987ff9a2d50a0db9140f0ec2 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 9 May 2024 07:51:39 -0700 Subject: [PATCH 274/333] docs: change README to show Write veneer (#452) --- .../bigquery-storage/.readme-partials.yaml | 107 ++++++++++++++++++ handwritten/bigquery-storage/README.md | 107 +++++++++++++++++- 2 files changed, 213 insertions(+), 1 deletion(-) create mode 100644 handwritten/bigquery-storage/.readme-partials.yaml diff --git a/handwritten/bigquery-storage/.readme-partials.yaml b/handwritten/bigquery-storage/.readme-partials.yaml new file mode 100644 index 00000000000..12bd9210da7 --- /dev/null +++ b/handwritten/bigquery-storage/.readme-partials.yaml @@ -0,0 +1,107 @@ +introduction: |- + > Node.js idiomatic client for [BigQuery Storage](https://cloud.google.com/bigquery). 
+ + The BigQuery Storage product is divided into two major APIs: Write and Read API. + BigQuery Storage API does not provide functionality related to managing BigQuery + resources such as datasets, jobs, or tables. + + The BigQuery Storage Write API is a unified data-ingestion API for BigQuery. + It combines streaming ingestion and batch loading into a single high-performance API. + You can use the Storage Write API to stream records into BigQuery in real time or + to batch process an arbitrarily large number of records and commit them in a single + atomic operation. + + Read more in our [introduction guide](https://cloud.google.com/bigquery/docs/write-api). + + Using a system provided default stream, this code sample demonstrates using the + schema of a destination stream/table to construct a writer, and send several + batches of row data to the table. + + ```javascript + const {adapt, managedwriter} = require('@google-cloud/bigquery-storage'); + const {WriterClient, JSONWriter} = managedwriter; + + async function appendJSONRowsDefaultStream() { + const projectId = 'my_project'; + const datasetId = 'my_dataset'; + const tableId = 'my_table'; + + const destinationTable = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; + const writeClient = new WriterClient({projectId}); + + try { + const writeStream = await writeClient.getWriteStream({ + streamId: `${destinationTable}/streams/_default`, + view: 'FULL' + }); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + writeStream.tableSchema, + 'root' + ); + + const connection = await writeClient.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable, + }); + const streamId = connection.getStreamId(); + + const writer = new JSONWriter({ + streamId, + connection, + protoDescriptor, + }); + + let rows = []; + const pendingWrites = []; + + // Row 1 + let row = { + row_num: 1, + customer_name: 'Octavia', + }; + rows.push(row); + + // Row 2 + row = { + row_num: 
2, + customer_name: 'Turing', + }; + rows.push(row); + + // Send batch. + let pw = writer.appendRows(rows); + pendingWrites.push(pw); + + rows = []; + + // Row 3 + row = { + row_num: 3, + customer_name: 'Bell', + }; + rows.push(row); + + // Send batch. + pw = writer.appendRows(rows); + pendingWrites.push(pw); + + const results = await Promise.all( + pendingWrites.map(pw => pw.getResult()) + ); + console.log('Write results:', results); + } catch (err) { + console.log(err); + } finally { + writeClient.close(); + } + } + ``` + + The BigQuery Storage Read API provides fast access to BigQuery-managed storage by + using an gRPC based protocol. When you use the Storage Read API, structured data is + sent over the wire in a binary serialization format. This allows for additional + parallelism among multiple consumers for a set of results. + + Read more how to [use the BigQuery Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage). + + See sample code on the [Quickstart section](#quickstart). \ No newline at end of file diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index c944a95422c..6fac27cd146 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -10,7 +10,112 @@ -Client for the BigQuery Storage API +> Node.js idiomatic client for [BigQuery Storage](https://cloud.google.com/bigquery). + +The BigQuery Storage product is divided into two major APIs: Write and Read API. +BigQuery Storage API does not provide functionality related to managing BigQuery +resources such as datasets, jobs, or tables. + +The BigQuery Storage Write API is a unified data-ingestion API for BigQuery. +It combines streaming ingestion and batch loading into a single high-performance API. +You can use the Storage Write API to stream records into BigQuery in real time or +to batch process an arbitrarily large number of records and commit them in a single +atomic operation. 
+ +Read more in our [introduction guide](https://cloud.google.com/bigquery/docs/write-api). + +Using a system provided default stream, this code sample demonstrates using the +schema of a destination stream/table to construct a writer, and send several +batches of row data to the table. + +```javascript +const {adapt, managedwriter} = require('@google-cloud/bigquery-storage'); +const {WriterClient, JSONWriter} = managedwriter; + +async function appendJSONRowsDefaultStream() { + const projectId = 'my_project'; + const datasetId = 'my_dataset'; + const tableId = 'my_table'; + + const destinationTable = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; + const writeClient = new WriterClient({projectId}); + + try { + const writeStream = await writeClient.getWriteStream({ + streamId: `${destinationTable}/streams/_default`, + view: 'FULL' + }); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + writeStream.tableSchema, + 'root' + ); + + const connection = await writeClient.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable, + }); + const streamId = connection.getStreamId(); + + const writer = new JSONWriter({ + streamId, + connection, + protoDescriptor, + }); + + let rows = []; + const pendingWrites = []; + + // Row 1 + let row = { + row_num: 1, + customer_name: 'Octavia', + }; + rows.push(row); + + // Row 2 + row = { + row_num: 2, + customer_name: 'Turing', + }; + rows.push(row); + + // Send batch. + let pw = writer.appendRows(rows); + pendingWrites.push(pw); + + rows = []; + + // Row 3 + row = { + row_num: 3, + customer_name: 'Bell', + }; + rows.push(row); + + // Send batch. 
+ pw = writer.appendRows(rows); + pendingWrites.push(pw); + + const results = await Promise.all( + pendingWrites.map(pw => pw.getResult()) + ); + console.log('Write results:', results); + } catch (err) { + console.log(err); + } finally { + writeClient.close(); + } +} +``` + +The BigQuery Storage Read API provides fast access to BigQuery-managed storage by +using an gRPC based protocol. When you use the Storage Read API, structured data is +sent over the wire in a binary serialization format. This allows for additional +parallelism among multiple consumers for a set of results. + +Read more how to [use the BigQuery Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage). + +See sample code on the [Quickstart section](#quickstart). A comprehensive list of changes in each version may be found in From 07f035c71de525047db3f8a7506e01abd4247b21 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 09:14:56 -0400 Subject: [PATCH 275/333] chore: update copyright year for auto-generated protos (#455) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update copyright year for auto-generated protos PiperOrigin-RevId: 631538781 Source-Link: https://github.com/googleapis/googleapis/commit/3597f7db2191c00b100400991ef96e52d62f5841 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8021fdf8d66f6005519c044d5834124b677dc919 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODAyMWZkZjhkNjZmNjAwNTUxOWMwNDRkNTgzNDEyNGI2NzdkYzkxOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../protos/google/cloud/bigquery/storage/v1/arrow.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/avro.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/protobuf.proto | 2 +- 
.../protos/google/cloud/bigquery/storage/v1/storage.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/stream.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1/table.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1beta1/arrow.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1beta1/avro.proto | 2 +- .../google/cloud/bigquery/storage/v1beta1/read_options.proto | 2 +- .../protos/google/cloud/bigquery/storage/v1beta1/storage.proto | 2 +- .../google/cloud/bigquery/storage/v1beta1/table_reference.proto | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 05036d21db5..530d4179d1b 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index 588406aba31..b104a90638b 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto index e12f4d8db07..99a9c77492d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index 97eb35c214d..d83dacc465c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index c75c637cf3f..511eb4047ed 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 7f9dde5f6b7..5438fd3f62a 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto index 4894e2fea8d..b7decf20c4a 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto index ad388d42d8c..96e268f2f2c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 2a4a9076169..3f863e71c2b 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 3f9f218f12a..67422bee643 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index 3fe3dfe27d2..1c194a8066c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
From 14546ea53f5f6de3cc379c7ad668a3dd933d0122 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Fri, 10 May 2024 11:45:11 -0700 Subject: [PATCH 276/333] ci: fix npm pack/publish by removing import package.json (#458) --- .../system-test/managed_writer_client_test.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index f7f12c5d029..4d7350f4130 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -13,6 +13,8 @@ // limitations under the License. import * as assert from 'assert'; +import {readFileSync} from 'fs'; +import * as path from 'path'; import {describe, it} from 'mocha'; import * as uuid from 'uuid'; import * as gax from 'google-gax'; @@ -25,7 +27,10 @@ import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; -import * as pkg from '../package.json'; + +const pkg = JSON.parse( + readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8') +); const sandbox = sinon.createSandbox(); afterEach(() => sandbox.restore()); From d11283c920b1578e9ad4b3f6da2ebcaf78733701 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 15:03:43 -0400 Subject: [PATCH 277/333] chore(main): release 4.8.0 (#456) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.8.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...pet_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 2a75024795e..a759778f613 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.8.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.7.0...v4.8.0) (2024-05-10) + + +### Features + +* Add getWriteStream and createWriteStreamFullResponse methods ([#453](https://github.com/googleapis/nodejs-bigquery-storage/issues/453)) ([27dce6a](https://github.com/googleapis/nodejs-bigquery-storage/commit/27dce6a763c07a9fab70a18f15305e92fdbea5f1)) + ## [4.7.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.6.0...v4.7.0) (2024-05-03) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index e963bbdffd0..c83973b09ff 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.7.0", + "version": "4.8.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 
3bd4fd70819..a50f511e516 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.7.0", + "version": "4.8.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 4bb04f7f44c..92c2563a441 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.7.0", + "version": "4.8.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 4002d9dc4e7..5ba0d32f04d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.7.0", + "version": "4.8.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 4002d9dc4e7..5ba0d32f04d 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.7.0", + "version": "4.8.0", "language": "TYPESCRIPT", "apis": [ { From bef56cb61a0e04b293fba262205e9b0af31a6ed6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 22 May 2024 13:58:34 -0700 Subject: [PATCH 278/333] feat: update Nodejs generator to send API versions in headers for GAPICs (#459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: update Nodejs generator to send API versions in headers for GAPICs PiperOrigin-RevId: 634109303 Source-Link: https://github.com/googleapis/googleapis/commit/998ade8d5e34d18df5ce36ce2baefdd57f4da375 Source-Link: https://github.com/googleapis/googleapis-gen/commit/000ca6f00801f65b847e6029cb05111404df21ec Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDAwY2E2ZjAwODAxZjY1Yjg0N2U2MDI5Y2IwNTExMTQwNGRmMjFlYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../src/v1/big_query_read_client.ts | 2 +- .../src/v1/big_query_write_client.ts | 2 +- .../src/v1beta1/big_query_storage_client.ts | 2 +- .../test/gapic_big_query_read_v1.ts | 50 ++++++++++++++++++ .../test/gapic_big_query_storage_v1beta1.ts | 52 +++++++++++++++++++ 5 files changed, 105 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 396581d0cd0..12618c432af 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -219,7 
+219,7 @@ export class BigQueryReadClient { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - /* gaxStreamingRetries: */ false + !!opts.gaxServerStreamingRetries ), }; diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index 8bf17046fb6..cd9ed9e78e2 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -222,7 +222,7 @@ export class BigQueryWriteClient { appendRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, - /* gaxStreamingRetries: */ false + !!opts.gaxServerStreamingRetries ), }; diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index a8b32943153..2cef5cca838 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -218,7 +218,7 @@ export class BigQueryStorageClient { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - /* gaxStreamingRetries: */ false + !!opts.gaxServerStreamingRetries ), }; diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index e6a50c7ee64..8bfd6ebe5a6 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -578,6 +578,50 @@ describe('v1.BigQueryReadClient', () => { assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); + it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + 
gaxServerStreamingRetries: true, + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1.ReadRowsRequest', + ['readStream'] + ); + request.readStream = defaultValue1; + const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() + ); + client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + it('invokes readRows with error', async () => { const client = new bigqueryreadModule.v1.BigQueryReadClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, @@ -657,6 +701,12 @@ describe('v1.BigQueryReadClient', () => { }); await assert.rejects(promise, expectedError); }); + it('should create a client with gaxServerStreamingRetries enabled', () => { + const client = new bigqueryreadModule.v1.BigQueryReadClient({ + gaxServerStreamingRetries: true, + }); + assert(client); + }); }); describe('Path templates', () => { diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts 
b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 0b74bd11430..2f03bc17ac7 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -885,6 +885,52 @@ describe('v1beta1.BigQueryStorageClient', () => { assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); }); + it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + gaxServerStreamingRetries: true, + }); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + ); + request.readPosition ??= {}; + request.readPosition.stream ??= {}; + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', + ['readPosition', 'stream', 'name'] + ); + request.readPosition.stream.name = defaultValue1; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() + ); + client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); + const stream = client.readRows(request); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.readRows as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + it('invokes readRows with error', async () => { const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ credentials: {client_email: 'bogus', private_key: 'bogus'}, @@ -968,6 +1014,12 @@ describe('v1beta1.BigQueryStorageClient', () => { }); await assert.rejects(promise, expectedError); }); + it('should create a client with gaxServerStreamingRetries enabled', () => { + const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + gaxServerStreamingRetries: true, + }); + assert(client); + }); }); describe('Path templates', () => { From a902d148e64f09610382fbb6b898df6e917b472e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 27 May 2024 16:18:32 +0200 Subject: [PATCH 279/333] chore(deps): update dependency sinon to v18 (#460) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c83973b09ff..660bbf58e4c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -51,7 +51,7 @@ "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^2.0.0", - "sinon": "^17.0.0", + "sinon": "^18.0.0", "ts-loader": "^9.0.0", "typescript": "^5.1.6", "uuid": "^9.0.0", From b4450536822890827a2616aa6cff4b5a3eb01fb8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 17:35:03 -0700 Subject: [PATCH 280/333] chore: [node] add auto-approve templates, and install dependencies with engines-strict (#463) * chore: [node] add auto-approve templates, and install dependencies with engines-strict chore: add auto-approve templates, and install dependencies with engines-strict Source-Link: https://github.com/googleapis/synthtool/commit/4a02d97333d1c1642d1b19b00645afdcf4ab36a4 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:68e1cece0d6d3336c4f1cb9d2857b020af5574dff6da6349293d1c6d4eea82d8 * Update package.json --------- Co-authored-by: Owl Bot Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 6 +++--- handwritten/bigquery-storage/.github/auto-approve.yml | 3 ++- handwritten/bigquery-storage/package.json | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 638efabfb52..34bb2086de0 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:e92044720ab3cb6984a70b0c6001081204375959ba3599ef6c42dd99a7783a67 -# created: 2023-11-10T00:24:05.581078808Z + digest: sha256:68e1cece0d6d3336c4f1cb9d2857b020af5574dff6da6349293d1c6d4eea82d8 +# created: 2024-05-31T15:46:42.989947733Z diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml index 4cd91cc16ae..ec51b072dca 100644 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -1,3 +1,4 @@ processes: - "NodeDependency" - - "OwlBotTemplateChanges" + - "OwlBotTemplateChangesNode" + - "OwlBotPRsNode" \ No newline at end of file diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 660bbf58e4c..0ee9b4b8ff6 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -47,7 +47,7 @@ "jsdoc": "^4.0.0", "jsdoc-fresh": "^3.0.0", "jsdoc-region-tag": "^3.0.0", - "linkinator": "^5.0.0", + "linkinator": "^3.0.0", "mocha": "^9.2.2", "null-loader": "^4.0.0", "pack-n-play": "^2.0.0", From 08a7ce33aa9208d5e6c38e151464687a838f2ec3 Mon Sep 17 00:00:00 2001 From: "Leah E. 
Cole" <6719667+leahecole@users.noreply.github.com> Date: Tue, 2 Jul 2024 11:42:12 -0400 Subject: [PATCH 281/333] fix: typo - commited -> committed (#468) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: typo - commited -> committed * fix: update eslint rules after gts 5.3.1 broke it * fix: output of npm run fix * fix typos in samples readme * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/README.md | 2 +- .../system-test/managed_writer_client_test.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 6fac27cd146..f9f0725b51a 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -291,7 +291,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-st | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | | Append_rows_buffered | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_buffered.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_buffered.js,samples/README.md) | -| Append_rows_json_writer_commited | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_commited.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_commited.js,samples/README.md) | +| Append_rows_json_writer_committed | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_committed.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_committed.js,samples/README.md) | | Append_rows_json_writer_default | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_default.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_default.js,samples/README.md) | | Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | | Append_rows_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 4d7350f4130..0da9ae4b86c 100644 --- 
a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1683,8 +1683,8 @@ describe('managedwriter.WriterClient', () => { async function deleteDatasets() { let [datasets] = await bigquery.getDatasets(); - datasets = datasets.filter( - dataset => dataset.id?.includes(GCLOUD_TESTS_PREFIX) + datasets = datasets.filter(dataset => + dataset.id?.includes(GCLOUD_TESTS_PREFIX) ); for (const dataset of datasets) { From 1abee64f617f762e132bbe2cae2ff016cb41af40 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Jul 2024 13:21:42 -0400 Subject: [PATCH 282/333] ci: Enable `constraintsFiltering` for Node.js Libraries (#466) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: Enable `constraintsFiltering` for Node.js Libraries chore: Enable `constraintsFiltering` for Node.js Libraries Source-Link: https://github.com/googleapis/synthtool/commit/dae1282201b64e4da3ad512632cb4dda70a832a1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:d920257482ca1cd72978f29f7d28765a9f8c758c21ee0708234db5cf4c5016c2 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 34bb2086de0..9e90d54bfb2 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:68e1cece0d6d3336c4f1cb9d2857b020af5574dff6da6349293d1c6d4eea82d8 -# created: 2024-05-31T15:46:42.989947733Z + digest: sha256:d920257482ca1cd72978f29f7d28765a9f8c758c21ee0708234db5cf4c5016c2 +# created: 2024-06-12T16:18:41.688792375Z From e62fb155cf3f913d2c8bb39d0b881d68007078b5 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 15 Aug 2024 09:54:22 -0700 Subject: [PATCH 283/333] refactor: move logger to util folder (#472) Towards #431, separating logger changes. --- handwritten/bigquery-storage/src/managedwriter/index.ts | 2 +- .../bigquery-storage/src/managedwriter/stream_connection.ts | 2 +- .../bigquery-storage/src/{managedwriter => util}/logger.ts | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename handwritten/bigquery-storage/src/{managedwriter => util}/logger.ts (100%) diff --git a/handwritten/bigquery-storage/src/managedwriter/index.ts b/handwritten/bigquery-storage/src/managedwriter/index.ts index 0ce72a8eb51..fd33b53d640 100644 --- a/handwritten/bigquery-storage/src/managedwriter/index.ts +++ b/handwritten/bigquery-storage/src/managedwriter/index.ts @@ -34,4 +34,4 @@ export { PendingStream, } from './stream_types'; export {parseStorageErrors} from './error'; -export {setLogFunction} from './logger'; +export {setLogFunction} from '../util/logger'; diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 56e05e10865..da312f27828 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -18,7 +18,7 @@ import * as protos from '../../protos/protos'; import {WriterClient} from './writer_client'; import {PendingWrite} from './pending_write'; -import {logger} from './logger'; +import {logger} from '../util/logger'; type TableSchema = 
protos.google.cloud.bigquery.storage.v1.ITableSchema; type IInt64Value = protos.google.protobuf.IInt64Value; diff --git a/handwritten/bigquery-storage/src/managedwriter/logger.ts b/handwritten/bigquery-storage/src/util/logger.ts similarity index 100% rename from handwritten/bigquery-storage/src/managedwriter/logger.ts rename to handwritten/bigquery-storage/src/util/logger.ts From 6a9b53d8dc7624e5994dc90b83d7c75ab828f3c5 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 21 Aug 2024 11:04:01 -0700 Subject: [PATCH 284/333] feat: support BQ Storage Write CDC (#473) --- .../bigquery-storage/src/adapt/index.ts | 2 + .../bigquery-storage/src/adapt/options.ts | 45 +++++++ .../bigquery-storage/src/adapt/proto.ts | 57 +++++++- .../system-test/managed_writer_client_test.ts | 125 ++++++++++++++++++ .../bigquery-storage/test/adapt/proto.ts | 39 ++++++ 5 files changed, 263 insertions(+), 5 deletions(-) create mode 100644 handwritten/bigquery-storage/src/adapt/options.ts diff --git a/handwritten/bigquery-storage/src/adapt/index.ts b/handwritten/bigquery-storage/src/adapt/index.ts index 8a04df1e152..9afc0bdd5f3 100644 --- a/handwritten/bigquery-storage/src/adapt/index.ts +++ b/handwritten/bigquery-storage/src/adapt/index.ts @@ -18,3 +18,5 @@ export { } from './proto'; export {convertBigQuerySchemaToStorageTableSchema} from './schema'; + +export {withChangeType, withChangeSequenceNumber} from './options'; diff --git a/handwritten/bigquery-storage/src/adapt/options.ts b/handwritten/bigquery-storage/src/adapt/options.ts new file mode 100644 index 00000000000..d87c5ae2fe7 --- /dev/null +++ b/handwritten/bigquery-storage/src/adapt/options.ts @@ -0,0 +1,45 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +export type AdaptOptions = { + addChangeSequenceNumber: boolean; + addChangeType: boolean; +}; + +export type AdaptOption = (opts: AdaptOptions) => AdaptOptions; + +/** + * Add pseudocolumn `_CHANGE_TYPE` for BigQuery Change Data Capture. + * Used to define the type of change to be professed for each row. + * The pseudocolumn `_CHANGE_TYPE` only accepts the values UPSERT and DELETE. + * See more: https://cloud.google.com/bigquery/docs/change-data-capture#specify_changes_to_existing_records + */ +export function withChangeType(): AdaptOption { + return (opts: AdaptOptions) => ({ + ...opts, + addChangeType: true, + }); +} + +/** + * Add pseudocolumn `_CHANGE_SEQUENCE_NUMBER` for BigQuery Change Data Capture. + * Used to change behavior of ordering records with same primary key. 
+ * See more: https://cloud.google.com/bigquery/docs/change-data-capture#manage_custom_ordering + */ +export function withChangeSequenceNumber(): AdaptOption { + return (opts: AdaptOptions) => ({ + ...opts, + addChangeSequenceNumber: true, + }); +} diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 870ede032be..96ffc335f60 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -15,6 +15,7 @@ import * as protos from '../../protos/protos'; import {bqTypeToFieldTypeMap, convertModeToLabel} from './proto_mappings'; import {normalizeFieldType} from './schema_mappings'; +import {AdaptOptions, AdaptOption} from './options'; type TableSchema = protos.google.cloud.bigquery.storage.v1.ITableSchema; type TableFieldSchema = @@ -59,12 +60,14 @@ const packedTypes: FieldDescriptorProtoType[] = [ */ export function convertStorageSchemaToProto2Descriptor( schema: TableSchema, - scope: string + scope: string, + ...opts: AdaptOption[] ): DescriptorProto { const fds = convertStorageSchemaToFileDescriptorInternal( schema, scope, - false + false, + ...opts ); return normalizeDescriptorSet(fds); } @@ -76,17 +79,32 @@ export function convertStorageSchemaToProto2Descriptor( */ export function convertStorageSchemaToProto3Descriptor( schema: TableSchema, - scope: string + scope: string, + ...opts: AdaptOption[] ): DescriptorProto { - const fds = convertStorageSchemaToFileDescriptorInternal(schema, scope, true); + const fds = convertStorageSchemaToFileDescriptorInternal( + schema, + scope, + true, + ...opts + ); return normalizeDescriptorSet(fds); } function convertStorageSchemaToFileDescriptorInternal( schema: TableSchema, scope: string, - useProto3: boolean + useProto3: boolean, + ...opts: AdaptOption[] ): FileDescriptorSet { + let adaptOpts: AdaptOptions = { + addChangeSequenceNumber: false, + addChangeType: false, + }; + opts.forEach(f => { + adaptOpts = 
f(adaptOpts); + }); + let fNumber = 0; const fields: FieldDescriptorProto[] = []; const deps = new Map(); @@ -150,6 +168,35 @@ function convertStorageSchemaToFileDescriptorInternal( } } + if (adaptOpts) { + if (adaptOpts.addChangeSequenceNumber) { + const fdp = convertTableFieldSchemaToFieldDescriptorProto( + { + name: '_CHANGE_SEQUENCE_NUMBER', + type: 'STRING', + mode: 'REQUIRED', + }, + 991, + scope, + useProto3 + ); + fields.push(fdp); + } + if (adaptOpts.addChangeType) { + const fdp = convertTableFieldSchemaToFieldDescriptorProto( + { + name: '_CHANGE_TYPE', + type: 'STRING', + mode: 'REQUIRED', + }, + 992, + scope, + useProto3 + ); + fields.push(fdp); + } + } + const dp = new DescriptorProto({ name: scope, field: fields, diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 0da9ae4b86c..614ecca3d8f 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -795,6 +795,131 @@ describe('managedwriter.WriterClient', () => { client.close(); } }).timeout(30 * 1000); + + it('Change data capture (CDC)', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const schema: TableSchema = { + fields: [ + { + name: 'id', + type: 'INTEGER', + mode: 'REQUIRED', + }, + { + name: 'username', + type: 'STRING', + mode: 'REQUIRED', + }, + ], + }; + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId + '_cdc', { + schema, + clustering: { + fields: ['id'], + }, + tableConstraints: { + primaryKey: { + columns: ['id'], + }, + }, + }); + const parent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + 
adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'root', + adapt.withChangeType() + ); + + const row1 = { + id: 1, + username: 'Alice', + _CHANGE_TYPE: 'INSERT', + }; + + const row2 = { + id: 2, + username: 'Bob', + _CHANGE_TYPE: 'INSERT', + }; + + try { + const insertConn = await client.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection: insertConn, + protoDescriptor, + }); + + let pw = writer.appendRows([row1, row2]); + let result = await pw.getResult(); + + let [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + ); + assert.strictEqual(rows.length, 2); + + const updaterConn = await client.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable: parent, + }); + + const updater = new JSONWriter({ + connection: updaterConn, + protoDescriptor, + }); + + // Change Alice and send Charles + row1.username = 'Alice in Wonderlands'; + row1._CHANGE_TYPE = 'UPSERT'; + + const row3 = { + id: 3, + username: 'Charles', + _CHANGE_TYPE: 'UPSERT', + }; + + pw = updater.appendRows([row1, row3]); + result = await pw.getResult(); + + [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + ); + assert.strictEqual(rows.length, 3); + + // Remove Bob + row2._CHANGE_TYPE = 'DELETE'; + + pw = updater.appendRows([row2]); + result = await pw.getResult(); + + [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + ); + assert.strictEqual(rows.length, 2); + + assert.deepStrictEqual(rows, [ + {id: 1, username: 'Alice in Wonderlands'}, + {id: 3, username: 'Charles'}, + ]); + + writer.close(); + updater.close(); + } finally { + client.close(); + } + }); }); it('should fill default values when MissingValuesInterpretation is set', async () => { diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts 
b/handwritten/bigquery-storage/test/adapt/proto.ts index 6973b06b9cf..2e34b02bf26 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -75,6 +75,45 @@ describe('Adapt Protos', () => { assert.deepEqual(raw, decoded); }); + it('basic with CDC fields', () => { + const schema = { + fields: [ + { + name: 'id', + type: 'INTEGER', + mode: 'NULLABLE', + }, + { + name: 'username', + type: 'STRING', + mode: 'REQUIRED', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Test', + adapt.withChangeType(), + adapt.withChangeSequenceNumber() + ); + assert.notEqual(protoDescriptor, null); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + const TestProto = Type.fromDescriptor(protoDescriptor); + const raw = { + id: 1, + username: 'Alice', + _CHANGE_TYPE: 'INSERT', + _CHANGE_SEQUENCE_NUMBER: 'FF', + }; + const serialized = TestProto.encode(raw).finish(); + const decoded = TestProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); + it('nested struct', () => { const schema = { fields: [ From ebac7ceae882911b6d72fb37c3a9cdd4bd506222 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 10 Sep 2024 10:48:36 -0700 Subject: [PATCH 285/333] feat: support flexible columns on adapt package (#474) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: support flexible columns on adapt package * fix: lint issue * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: use flexible field names within JSONEncoder --------- Co-authored-by: Owl Bot --- .../bigquery-storage/protos/protos.d.ts | 9 -- handwritten/bigquery-storage/protos/protos.js | 21 +---- .../bigquery-storage/protos/protos.json | 3 + 
.../bigquery-storage/src/adapt/proto.ts | 80 +++++++++++++++++- .../src/managedwriter/encoder.ts | 18 +++- .../system-test/managed_writer_client_test.ts | 83 +++++++++++++++++++ .../bigquery-storage/test/adapt/proto.ts | 75 +++++++++++++++++ 7 files changed, 259 insertions(+), 30 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 6a9bd20d350..9592df04fc8 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1649,9 +1649,6 @@ export namespace google { /** ReadRowsResponse schema. */ public schema?: ("avroSchema"|"arrowSchema"); - /** ReadRowsResponse _uncompressedByteSize. */ - public _uncompressedByteSize?: "uncompressedByteSize"; - /** * Creates a new ReadRowsResponse instance using the specified properties. * @param [properties] Properties to set @@ -3788,12 +3785,6 @@ export namespace google { /** TableReadOptions outputFormatSerializationOptions. */ public outputFormatSerializationOptions?: ("arrowSerializationOptions"|"avroSerializationOptions"); - /** TableReadOptions _samplePercentage. */ - public _samplePercentage?: "samplePercentage"; - - /** TableReadOptions _responseCompressionCodec. */ - public _responseCompressionCodec?: "responseCompressionCodec"; - /** * Creates a new TableReadOptions instance using the specified properties. * @param [properties] Properties to set diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index f27ee65ad6a..360d687ad51 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -3510,12 +3510,7 @@ set: $util.oneOfSetter($oneOfFields) }); - /** - * ReadRowsResponse _uncompressedByteSize. 
- * @member {"uncompressedByteSize"|undefined} _uncompressedByteSize - * @memberof google.cloud.bigquery.storage.v1.ReadRowsResponse - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ReadRowsResponse.prototype, "_uncompressedByteSize", { get: $util.oneOfGetter($oneOfFields = ["uncompressedByteSize"]), set: $util.oneOfSetter($oneOfFields) @@ -9045,23 +9040,13 @@ set: $util.oneOfSetter($oneOfFields) }); - /** - * TableReadOptions _samplePercentage. - * @member {"samplePercentage"|undefined} _samplePercentage - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(TableReadOptions.prototype, "_samplePercentage", { get: $util.oneOfGetter($oneOfFields = ["samplePercentage"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * TableReadOptions _responseCompressionCodec. - * @member {"responseCompressionCodec"|undefined} _responseCompressionCodec - * @memberof google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(TableReadOptions.prototype, "_responseCompressionCodec", { get: $util.oneOfGetter($oneOfFields = ["responseCompressionCodec"]), set: $util.oneOfSetter($oneOfFields) diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 63e97dd1808..e5e3e0295c4 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1,4 +1,7 @@ { + "options": { + "syntax": "proto3" + }, "nested": { "google": { "nested": { diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 96ffc335f60..5ea786bbd23 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -54,6 +54,13 @@ const packedTypes: FieldDescriptorProtoType[] = [ 
]; /** Builds a DescriptorProto for a given table schema using proto2 syntax. + * + * If a column name is not compatible as a protobuf field name, it will be replaced + * with a placeholder name. The placeholder name is generated by removing all invalid + * characters from the column name and replacing all dashes with underscores. If the + * column name doesn't have any valid characters, we generate a placeholder name using + * the field number `field{fieldNumber}`. + * * @param schema - a BigQuery Storage TableSchema. * @param scope - scope to namespace protobuf structs. * @returns DescriptorProto @@ -73,6 +80,13 @@ export function convertStorageSchemaToProto2Descriptor( } /** Builds a DescriptorProto for a given table schema using proto3 syntax. + * + * If a column name is not compatible as a protobuf field name, it will be replaced + * with a placeholder name. The placeholder name is generated by removing all invalid + * characters from the column name and replacing all dashes with underscores. If the + * column name doesn't have any valid characters, we generate a placeholder name using + * the field number `field{fieldNumber}`. + * * @param schema - a Bigquery TableSchema. * @param scope - scope to namespace protobuf structs. 
* @returns DescriptorProto @@ -293,7 +307,14 @@ function convertTableFieldSchemaToFieldDescriptorProto( scope: string, useProto3: boolean ): FieldDescriptorProto { - const name = field.name; + let name = field.name; + if (!name) { + throw Error('table field missing name'); + } + const isNameCompatible = isProtoCompatible(name); + if (!isNameCompatible) { + name = generatePlaceholderFieldName(name); + } const type = normalizeFieldType(field); if (!type) { throw Error(`table field ${name} missing type`); @@ -327,9 +348,66 @@ function convertTableFieldSchemaToFieldDescriptorProto( proto3Optional: isProto3Optional(label, useProto3), }); } + if (!isNameCompatible) { + if (!fdp.options) { + fdp.options = {}; + } + fdp.options['.google.cloud.bigquery.storage.v1.columnName'] = field.name; + } return fdp; } +/** Checks if the field name is compatible with proto field naming convention. + * + * @internal + * @param fieldName name for the field + * @return true if the field name is comptaible with proto naming convention, + * otherwise, returns false. + */ +export function isProtoCompatible(fieldName: string): boolean { + if (fieldName.length < 1) { + return false; + } + const ch = fieldName.charAt(0); + if (!((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch === '_')) { + return false; + } + for (let i = 1; i < fieldName.length; i++) { + const ch = fieldName.charAt(i); + if ( + !( + (ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z') || + ch === '_' || + (ch >= '0' && ch <= '9') + ) + ) { + return false; + } + } + + return true; +} + +/** Generates a placeholder name that consists of a sanitized field name with only valid characters. + * If the field doesn't have any valid characters, we generate a placeholder name using the field number. + * We replace all dashes with underscores as they are not allowed for proto field names. 
+ * + * @internal + * @param fieldName table field name + * @param fNumber proto field number + * @return the generated placeholder field name + */ +export function generatePlaceholderFieldName(fieldName: string): string { + return ( + 'field_' + + Buffer.from(fieldName) + .toString('base64') + .replace(/[^a-zA-Z0-9-_]/g, '') + .replace(/-/g, '_') + ); +} + function shouldPackType( t: FieldDescriptorProtoType, label: FieldDescriptorProtoLabel | null, diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts index ece1f98de11..72890f5bde1 100644 --- a/handwritten/bigquery-storage/src/managedwriter/encoder.ts +++ b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -14,7 +14,11 @@ import * as protobuf from 'protobufjs'; import * as protos from '../../protos/protos'; -import {normalizeDescriptor} from '../adapt/proto'; +import { + generatePlaceholderFieldName, + isProtoCompatible, + normalizeDescriptor, +} from '../adapt/proto'; import * as extend from 'extend'; import {JSONObject, JSONValue} from './json_writer'; @@ -90,7 +94,17 @@ export class JSONEncoder { private convertRow(source: JSONObject, ptype: protobuf.Type): JSONObject { const row = extend(true, {}, source); - for (const key in row) { + const keys = Object.keys(row).map(key => { + if (!isProtoCompatible(key)) { + const newFieldName = generatePlaceholderFieldName(key); + // swap original key with placeholder field name + row[newFieldName] = row[key]; + delete row[key]; + return newFieldName; + } + return key; + }); + for (const key of keys) { const value = row[key]; if (value === null) { continue; diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 614ecca3d8f..0b5eaae51ed 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ 
b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -920,6 +920,89 @@ describe('managedwriter.WriterClient', () => { client.close(); } }); + + it('Flexible Columns and annotations', async () => { + bqWriteClient.initialize(); + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const schema: TableSchema = { + fields: [ + { + name: '特別コラム', + type: 'INTEGER', + mode: 'REQUIRED', + }, + { + name: 'second', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'third-column', + type: 'STRING', + mode: 'REQUIRED', + }, + ], + }; + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId + '_flexible', {schema}); + const parent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + const row1 = { + 特別コラム: 1, + second: 'second_value', + 'third-column': 'another_value', + }; + + const row2 = { + 特別コラム: 2, + second: 'another_one', + 'third-column': 'yet_another', + }; + + try { + const connection = await client.createStreamConnection({ + streamId: managedwriter.DefaultStream, + destinationTable: parent, + }); + + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + + const pw = writer.appendRows([row1, row2]); + await pw.getResult(); + + const [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\`` + ); + assert.strictEqual(rows.length, 2); + assert.deepStrictEqual(rows, [ + { + 特別コラム: 1, + second: 'second_value', + 'third-column': 'another_value', + }, + { + 特別コラム: 2, + second: 'another_one', + 'third-column': 'yet_another', + }, + ]); + + writer.close(); + } finally { + client.close(); + } + }); }); it('should fill default values when MissingValuesInterpretation is set', async () => { diff --git 
a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 2e34b02bf26..41ae926e597 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -114,6 +114,81 @@ describe('Adapt Protos', () => { assert.deepEqual(raw, decoded); }); + it('basic with Flexible column fields', () => { + const schema = { + fields: [ + { + name: '特別コラム', + type: 'INTEGER', + mode: 'NULLABLE', + }, + { + name: 'field-name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'foo_👍', + type: 'STRING', + mode: 'REQUIRED', + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Flexible' + ); + assert.notEqual(protoDescriptor, null); + + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + assert.deepEqual(JSON.parse(JSON.stringify(protoDescriptor)), { + name: 'Flexible', + field: [ + { + name: 'field_54m55Yil44Kz44Op44Og', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_INT64', + options: { + '.google.cloud.bigquery.storage.v1.columnName': '特別コラム', + }, + }, + { + name: 'field_ZmllbGQtbmFtZQ', + number: 2, + label: 'LABEL_REQUIRED', + type: 'TYPE_STRING', + options: { + '.google.cloud.bigquery.storage.v1.columnName': 'field-name', + }, + }, + { + name: 'field_Zm9vXCfkY0', + number: 3, + label: 'LABEL_REQUIRED', + type: 'TYPE_STRING', + options: { + '.google.cloud.bigquery.storage.v1.columnName': 'foo_👍', + }, + }, + ], + }); + + const FlexibleProto = Type.fromDescriptor(protoDescriptor); + const raw = { + field_54m55Yil44Kz44Op44Og: 1, + field_ZmllbGQtbmFtZQ: 'test', + field_Zm9vXCfkY0: 'foo', + }; + const serialized = FlexibleProto.encode(raw).finish(); + const decoded = FlexibleProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); + it('nested struct', () => { const schema = { fields: [ From 
1c17378f8f62dd6c3ccafbb00a6269952b9e5c6e Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 12 Sep 2024 10:42:46 -0700 Subject: [PATCH 286/333] build: fix path-to-regexp to older version due to node 14 requirement (#475) --- handwritten/bigquery-storage/package.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 0ee9b4b8ff6..ddf993fb4aa 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -52,6 +52,8 @@ "null-loader": "^4.0.0", "pack-n-play": "^2.0.0", "sinon": "^18.0.0", + "nise": "6.0.0", + "path-to-regexp": "6.2.2", "ts-loader": "^9.0.0", "typescript": "^5.1.6", "uuid": "^9.0.0", From 4a90072d846a8c9df954986d936dcdd574752f1d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 15:35:15 -0400 Subject: [PATCH 287/333] chore(main): release 4.9.0 (#461) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.9.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 14 ++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ..._metadata.google.cloud.bigquery.storage.v1.json | 2 +- ..._metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...data.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...data_google.cloud.bigquery.storage.v1beta1.json | 2 +- 6 files changed, 19 insertions(+), 5 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index a759778f613..281c7f25e8c 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ 
b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [4.9.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.8.0...v4.9.0) (2024-09-12) + + +### Features + +* Support BQ Storage Write CDC ([#473](https://github.com/googleapis/nodejs-bigquery-storage/issues/473)) ([8380ca8](https://github.com/googleapis/nodejs-bigquery-storage/commit/8380ca8896f6043aba7b756b2f974896bb42468e)) +* Support flexible columns on adapt package ([#474](https://github.com/googleapis/nodejs-bigquery-storage/issues/474)) ([c521cc5](https://github.com/googleapis/nodejs-bigquery-storage/commit/c521cc5b3085ca73ae490ec7436cec14d3ad53e5)) +* Update Nodejs generator to send API versions in headers for GAPICs ([#459](https://github.com/googleapis/nodejs-bigquery-storage/issues/459)) ([9dca0d3](https://github.com/googleapis/nodejs-bigquery-storage/commit/9dca0d3fa35d4eebec5f845151d1ea8a2b3ba837)) + + +### Bug Fixes + +* Typo - commited -> committed ([#468](https://github.com/googleapis/nodejs-bigquery-storage/issues/468)) ([672ab7d](https://github.com/googleapis/nodejs-bigquery-storage/commit/672ab7d2846e9379bb6320e8e39e8d53b9d8199e)) + ## [4.8.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.7.0...v4.8.0) (2024-05-10) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index ddf993fb4aa..88af3073770 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.8.0", + "version": "4.9.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index a50f511e516..e10d9a21daa 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.8.0", + "version": "4.9.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 92c2563a441..e1599cc1957 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.8.0", + "version": "4.9.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 5ba0d32f04d..1c0fa7caacb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.8.0", + "version": "4.9.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 5ba0d32f04d..1c0fa7caacb 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.8.0", + "version": "4.9.0", "language": "TYPESCRIPT", "apis": [ { From c60587deba8f95c7f62f6c0b2e5c17e83540497f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 13 Sep 2024 16:00:29 +0200 Subject: [PATCH 288/333] chore(deps): update dependency path-to-regexp to v6.3.0 (#477) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [path-to-regexp](https://redirect.github.com/pillarjs/path-to-regexp) | [`6.2.2` -> `6.3.0`](https://renovatebot.com/diffs/npm/path-to-regexp/6.2.2/6.3.0) | [![age](https://developer.mend.io/api/mc/badges/age/npm/path-to-regexp/6.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/npm/path-to-regexp/6.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/npm/path-to-regexp/6.2.2/6.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/npm/path-to-regexp/6.2.2/6.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pillarjs/path-to-regexp (path-to-regexp) ### [`v6.3.0`](https://redirect.github.com/pillarjs/path-to-regexp/releases/tag/v6.3.0): Fix backtracking in 6.x [Compare Source](https://redirect.github.com/pillarjs/path-to-regexp/compare/v6.2.2...v6.3.0) **Fixed** - Add backtrack protection to 6.x ([#​324](https://redirect.github.com/pillarjs/path-to-regexp/issues/324)) [`f1253b4`](https://redirect.github.com/pillarjs/path-to-regexp/commit/f1253b4)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 9am and before 3pm" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/googleapis/nodejs-bigquery-storage). --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 88af3073770..c0c086a5e0c 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -53,7 +53,7 @@ "pack-n-play": "^2.0.0", "sinon": "^18.0.0", "nise": "6.0.0", - "path-to-regexp": "6.2.2", + "path-to-regexp": "6.3.0", "ts-loader": "^9.0.0", "typescript": "^5.1.6", "uuid": "^9.0.0", From 23ceffd0ebaed058ceb99bca72fa3ae771b8ce29 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 16:15:12 -0400 Subject: [PATCH 289/333] feat: Add BigQuery Metastore Partition Service API version v1alpha (#471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: A comment for message `StreamMetastorePartitionsRequest` is changed docs: A comment for message `StreamMetastorePartitionsResponse` is changed docs: A comment for field `location_uri` in message `.google.cloud.bigquery.storage.v1alpha.StorageDescriptor` is changed PiperOrigin-RevId: 670602530 Source-Link: https://github.com/googleapis/googleapis/commit/9c6ceea874182fbfda3e59faba5df35906567c5b Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/1ab1ed59ce43ba96e71cb589704339f3b8c6e524 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWFiMWVkNTljZTQzYmE5NmU3MWNiNTg5NzA0MzM5ZjNiOGM2ZTUyNCJ9 feat: add documentation for partition value limit BREAKING CHANGE: make the client library gRPC only PiperOrigin-RevId: 666551276 Source-Link: https://github.com/googleapis/googleapis/commit/6f3c628e7fc39b5ca7186aba1a67ae39454d0752 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9c599698f90ae56a61e38a266bd7705614c5dff6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWM1OTk2OThmOTBhZTU2YTYxZTM4YTI2NmJkNzcwNTYxNGM1ZGZmNiJ9 feat: Support for a custom error message for BatchSizeTooLargeError PiperOrigin-RevId: 665560115 Source-Link: https://github.com/googleapis/googleapis/commit/869c2e1fff95ac1459b913cfde1c34296b75f3b5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb7632b5619fde8e24e47a30a8b5a720ac9886e2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2I3NjMyYjU2MTlmZGU4ZTI0ZTQ3YTMwYThiNWE3MjBhYzk4ODZlMiJ9 feat: Add BigQuery Metastore Partition Service API version v1alpha PiperOrigin-RevId: 662212485 Source-Link: https://github.com/googleapis/googleapis/commit/456a812fbc03ef50e253dc85f2b2c22a8af96d36 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ec266e6da03208a76b0fd6001ba7df93dae44e6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmVjMjY2ZTZkYTAzMjA4YTc2YjBmZDYwMDFiYTdkZjkzZGFlNDRlNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Increase method timeout to 240s for BigQuery Metastore Partition Service API version v1alpha PiperOrigin-RevId: 676173688 Source-Link: https://github.com/googleapis/googleapis/commit/02f118441fd76957d594f3a489b3b1f840fc66c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/93f225b1e5c89712fa17dc398f990bb1cd927025 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTNmMjI1YjFlNWM4OTcxMmZhMTdkYzM5OGY5OTBiYjFjZDkyNzAyNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: export v1alpha api * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- .../storage/v1alpha/metastore_partition.proto | 282 + .../bigquery/storage/v1alpha/partition.proto | 140 + .../bigquery-storage/protos/protos.d.ts | 2305 +++++++ handwritten/bigquery-storage/protos/protos.js | 5415 +++++++++++++++++ .../bigquery-storage/protos/protos.json | 489 ++ ...rvice.batch_create_metastore_partitions.js | 76 + ...rvice.batch_delete_metastore_partitions.js | 69 + ...rvice.batch_update_metastore_partitions.js | 68 + ...ition_service.list_metastore_partitions.js | 75 + ...ion_service.stream_metastore_partitions.js | 82 + ...google.cloud.bigquery.storage.v1alpha.json | 243 + handwritten/bigquery-storage/src/index.ts | 2 + .../src/v1alpha/gapic_metadata.json | 68 + .../bigquery-storage/src/v1alpha/index.ts | 19 + .../metastore_partition_service_client.ts | 1002 +++ ...store_partition_service_client_config.json | 54 + ...etastore_partition_service_proto_list.json | 4 + ...pic_metastore_partition_service_v1alpha.ts | 1131 ++++ 18 files changed, 11524 insertions(+) create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto create mode 100644 handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js create mode 100644 
handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json create mode 100644 handwritten/bigquery-storage/src/v1alpha/gapic_metadata.json create mode 100644 handwritten/bigquery-storage/src/v1alpha/index.ts create mode 100644 handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts create mode 100644 handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_proto_list.json create mode 100644 handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto new file mode 100644 index 00000000000..d34ceed42b0 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto @@ -0,0 +1,282 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1alpha/partition.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1Alpha"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1alpha/storagepb;storagepb"; +option java_multiple_files = true; +option java_outer_classname = "MetastorePartitionServiceProto"; +option java_package = "com.google.cloud.bigquery.storage.v1alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1alpha"; +option (google.api.resource_definition) = { + type: "bigquery.googleapis.com/Table" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}" +}; + +// BigQuery Metastore Partition Service API. +// This service is used for managing metastore partitions in BigQuery metastore. +// The service supports only batch operations for write. +service MetastorePartitionService { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/cloud-platform"; + + // Adds metastore partitions to a table. + rpc BatchCreateMetastorePartitions(BatchCreateMetastorePartitionsRequest) + returns (BatchCreateMetastorePartitionsResponse) { + option (google.api.http) = { + post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate" + body: "*" + }; + } + + // Deletes metastore partitions from a table. 
+ rpc BatchDeleteMetastorePartitions(BatchDeleteMetastorePartitionsRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete" + body: "*" + }; + } + + // Updates metastore partitions in a table. + rpc BatchUpdateMetastorePartitions(BatchUpdateMetastorePartitionsRequest) + returns (BatchUpdateMetastorePartitionsResponse) { + option (google.api.http) = { + post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate" + body: "*" + }; + } + + // Gets metastore partitions from a table. + rpc ListMetastorePartitions(ListMetastorePartitionsRequest) + returns (ListMetastorePartitionsResponse) { + option (google.api.http) = { + get: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list" + }; + option (google.api.method_signature) = "parent"; + } + + // This is a bi-di streaming rpc method that allows the client to send + // a stream of partitions and commit all of them atomically at the end. + // If the commit is successful, the server will return a + // response and close the stream. If the commit fails (due to duplicate + // partitions or other reason), the server will close the stream with an + // error. This method is only available via the gRPC API (not REST). + rpc StreamMetastorePartitions(stream StreamMetastorePartitionsRequest) + returns (stream StreamMetastorePartitionsResponse) {} +} + +// Request message for CreateMetastorePartition. The MetastorePartition is +// uniquely identified by values, which is an ordered list. Hence, there is no +// separate name or partition id field. +message CreateMetastorePartitionRequest { + // Required. Reference to the table to where the metastore partition to be + // added, in the format of + // projects/{project}/databases/{databases}/tables/{table}. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. The metastore partition to be added. + MetastorePartition metastore_partition = 2 + [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for BatchCreateMetastorePartitions. +message BatchCreateMetastorePartitionsRequest { + // Required. Reference to the table to where the metastore partitions to be + // added, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. Requests to add metastore partitions to the table. + repeated CreateMetastorePartitionRequest requests = 2 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. Mimics the ifNotExists flag in IMetaStoreClient + // add_partitions(..). If the flag is set to false, the server will return + // ALREADY_EXISTS if any partition already exists. If the flag is set to true, + // the server will skip existing partitions and insert only the non-existing + // partitions. + bool skip_existing_partitions = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for BatchCreateMetastorePartitions. +message BatchCreateMetastorePartitionsResponse { + // The list of metastore partitions that have been created. + repeated MetastorePartition partitions = 1; +} + +// Request message for BatchDeleteMetastorePartitions. The MetastorePartition is +// uniquely identified by values, which is an ordered list. Hence, there is no +// separate name or partition id field. +message BatchDeleteMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. The list of metastore partitions (identified by its values) to be + // deleted. A maximum of 100 partitions can be deleted in a batch. + repeated MetastorePartitionValues partition_values = 2 + [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for UpdateMetastorePartition. +message UpdateMetastorePartitionRequest { + // Required. The metastore partition to be updated. + MetastorePartition metastore_partition = 1 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. The list of fields to update. + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = OPTIONAL]; +} + +// Request message for BatchUpdateMetastorePartitions. +message BatchUpdateMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. Requests to update metastore partitions in the table. + repeated UpdateMetastorePartitionRequest requests = 2 + [(google.api.field_behavior) = REQUIRED]; +} + +// Response message for BatchUpdateMetastorePartitions. +message BatchUpdateMetastorePartitionsResponse { + // The list of metastore partitions that have been updated. + repeated MetastorePartition partitions = 1; +} + +// Request message for ListMetastorePartitions. +message ListMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Optional. SQL text filtering statement, similar to a WHERE clause in a + // query. Only supports single-row expressions. Aggregate functions are not + // supported. + // + // Examples: "int_field > 5" + // "date_field = CAST('2014-9-27' as DATE)" + // "nullable_field is not NULL" + // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // "numeric_field BETWEEN 1.0 AND 5.0" + // Restricted to a maximum length for 1 MB. + string filter = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for ListMetastorePartitions. +message ListMetastorePartitionsResponse { + // The response depends on the number of metastore partitions to be returned; + // it can be a list of partitions or a list of + // [ReadStream]((https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#readstream)) + // objects. For the second situation, the BigQuery [Read API + // ReadRows](https://cloud.google.com/bigquery/docs/reference/storage#read_from_a_session_stream) + // method must be used to stream the data and convert it into a list of + // partitions. + oneof response { + // The list of partitions. + MetastorePartitionList partitions = 1; + + // The list of streams. + StreamList streams = 2; + } +} + +// The top-level message sent by the client to the +// [Partitions.StreamMetastorePartitions][] method. +// Follows the default gRPC streaming maximum size of 4 MB. +message StreamMetastorePartitionsRequest { + // Required. Reference to the table to where the partition to be added, in the + // format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Optional. 
A list of metastore partitions to be added to the table. + repeated MetastorePartition metastore_partitions = 2 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Mimics the ifNotExists flag in IMetaStoreClient + // add_partitions(..). If the flag is set to false, the server will return + // ALREADY_EXISTS on commit if any partition already exists. If the flag is + // set to true: + // 1) the server will skip existing partitions + // insert only the non-existing partitions as part of the commit. + // 2) The client must set the `skip_existing_partitions` field to true for + // all requests in the stream. + bool skip_existing_partitions = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// This is the response message sent by the server +// to the client for the [Partitions.StreamMetastorePartitions][] method when +// the commit is successful. Server will close the stream after sending this +// message. +message StreamMetastorePartitionsResponse { + // Total count of partitions streamed by the client during the lifetime of the + // stream. This is only set in the final response message before closing the + // stream. + int64 total_partitions_streamed_count = 2; + + // Total count of partitions inserted by the server during the lifetime of the + // stream. This is only set in the final response message before closing the + // stream. + int64 total_partitions_inserted_count = 3; +} + +// Structured custom error message for batch size too large error. +// The error can be attached as error details in the returned rpc Status for +// more structured error handling in the client. +message BatchSizeTooLargeError { + // The maximum number of items that are supported in a single batch. This is + // returned as a hint to the client to adjust the batch size. + int64 max_batch_size = 1; + + // Optional. The error message that is returned to the client. 
+ string error_message = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto new file mode 100644 index 00000000000..d00b23d52ea --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto @@ -0,0 +1,140 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1Alpha"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1alpha/storagepb;storagepb"; +option java_multiple_files = true; +option java_outer_classname = "MetastorePartitionProto"; +option java_package = "com.google.cloud.bigquery.storage.v1alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1alpha"; + +// Schema description of a metastore partition column. +message FieldSchema { + // Required. The name of the column. + // The maximum length of the name is 1024 characters + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The type of the metastore partition column. Maximum allowed + // length is 1024 characters. 
+ string type = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Contains information about the physical storage of the data in the metastore +// partition. +message StorageDescriptor { + // Optional. The physical location of the metastore partition + // (e.g. `gs://spark-dataproc-data/pangea-data/case_sensitive/` or + // `gs://spark-dataproc-data/pangea-data/*`). + string location_uri = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the fully qualified class name of the InputFormat + // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). + // The maximum length is 128 characters. + string input_format = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the fully qualified class name of the OutputFormat + // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). + // The maximum length is 128 characters. + string output_format = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Serializer and deserializer information. + SerDeInfo serde_info = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Serializer and deserializer information. +message SerDeInfo { + // Optional. Name of the SerDe. + // The maximum length is 256 characters. + string name = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Specifies a fully-qualified class name of the serialization + // library that is responsible for the translation of data between table + // representation and the underlying low-level input and output format + // structures. The maximum length is 256 characters. + string serialization_library = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Key-value pairs that define the initialization parameters for the + // serialization library. + // Maximum size 10 Kib. + map parameters = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Information about a Hive partition. +message MetastorePartition { + // Required. 
Represents the values of the partition keys, where each value + // corresponds to a specific partition key in the order in which the keys are + // defined. Each value is limited to 1024 characters. + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. The creation time of the partition. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Contains information about the physical storage of the data in + // the partition. + StorageDescriptor storage_descriptor = 3 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Additional parameters or metadata associated with the partition. + // Maximum size 10 KiB. + map parameters = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. List of columns. + repeated FieldSchema fields = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// List of metastore partitions. +message MetastorePartitionList { + // Required. List of partitions. + repeated MetastorePartition partitions = 1 + [(google.api.field_behavior) = REQUIRED]; +} + +// Information about a single stream that is used to read partitions. +message ReadStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadStream" + pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + plural: "readStreams" + singular: "readStream" + }; + + // Output only. Identifier. Name of the stream, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IDENTIFIER + ]; +} + +// List of streams. +message StreamList { + // Output only. List of streams. + repeated ReadStream streams = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Represents the values of a metastore partition. +message MetastorePartitionValues { + // Required. 
The values of the partition keys, where each value corresponds to + // a specific partition key in the order in which the keys are defined. + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 9592df04fc8..c258a01a313 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4505,6 +4505,2214 @@ export namespace google { } } + /** Namespace v1alpha. */ + namespace v1alpha { + + /** Represents a MetastorePartitionService */ + class MetastorePartitionService extends $protobuf.rpc.Service { + + /** + * Constructs a new MetastorePartitionService service. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new MetastorePartitionService service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): MetastorePartitionService; + + /** + * Calls BatchCreateMetastorePartitions. 
+ * @param request BatchCreateMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCreateMetastorePartitionsResponse + */ + public batchCreateMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitionsCallback): void; + + /** + * Calls BatchCreateMetastorePartitions. + * @param request BatchCreateMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchCreateMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest): Promise; + + /** + * Calls BatchDeleteMetastorePartitions. + * @param request BatchDeleteMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public batchDeleteMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitionsCallback): void; + + /** + * Calls BatchDeleteMetastorePartitions. + * @param request BatchDeleteMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchDeleteMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest): Promise; + + /** + * Calls BatchUpdateMetastorePartitions. 
+ * @param request BatchUpdateMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchUpdateMetastorePartitionsResponse + */ + public batchUpdateMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitionsCallback): void; + + /** + * Calls BatchUpdateMetastorePartitions. + * @param request BatchUpdateMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchUpdateMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest): Promise; + + /** + * Calls ListMetastorePartitions. + * @param request ListMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ListMetastorePartitionsResponse + */ + public listMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitionsCallback): void; + + /** + * Calls ListMetastorePartitions. + * @param request ListMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public listMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest): Promise; + + /** + * Calls StreamMetastorePartitions. + * @param request StreamMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and StreamMetastorePartitionsResponse + */ + public streamMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitionsCallback): void; + + /** + * Calls StreamMetastorePartitions. 
+ * @param request StreamMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public streamMetastorePartitions(request: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest): Promise; + } + + namespace MetastorePartitionService { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchCreateMetastorePartitions}. + * @param error Error, if any + * @param [response] BatchCreateMetastorePartitionsResponse + */ + type BatchCreateMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchDeleteMetastorePartitions}. + * @param error Error, if any + * @param [response] Empty + */ + type BatchDeleteMetastorePartitionsCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchUpdateMetastorePartitions}. + * @param error Error, if any + * @param [response] BatchUpdateMetastorePartitionsResponse + */ + type BatchUpdateMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|listMetastorePartitions}. + * @param error Error, if any + * @param [response] ListMetastorePartitionsResponse + */ + type ListMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|streamMetastorePartitions}. 
+ * @param error Error, if any + * @param [response] StreamMetastorePartitionsResponse + */ + type StreamMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse) => void; + } + + /** Properties of a CreateMetastorePartitionRequest. */ + interface ICreateMetastorePartitionRequest { + + /** CreateMetastorePartitionRequest parent */ + parent?: (string|null); + + /** CreateMetastorePartitionRequest metastorePartition */ + metastorePartition?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null); + } + + /** Represents a CreateMetastorePartitionRequest. */ + class CreateMetastorePartitionRequest implements ICreateMetastorePartitionRequest { + + /** + * Constructs a new CreateMetastorePartitionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest); + + /** CreateMetastorePartitionRequest parent. */ + public parent: string; + + /** CreateMetastorePartitionRequest metastorePartition. */ + public metastorePartition?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null); + + /** + * Creates a new CreateMetastorePartitionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateMetastorePartitionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest): google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest; + + /** + * Encodes the specified CreateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.verify|verify} messages. 
+ * @param message CreateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.verify|verify} messages. + * @param message CreateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest; + + /** + * Verifies a CreateMetastorePartitionRequest message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateMetastorePartitionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest; + + /** + * Creates a plain object from a CreateMetastorePartitionRequest message. Also converts values to other types if specified. + * @param message CreateMetastorePartitionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateMetastorePartitionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateMetastorePartitionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateMetastorePartitionsRequest. */ + interface IBatchCreateMetastorePartitionsRequest { + + /** BatchCreateMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchCreateMetastorePartitionsRequest requests */ + requests?: (google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest[]|null); + + /** BatchCreateMetastorePartitionsRequest skipExistingPartitions */ + skipExistingPartitions?: (boolean|null); + } + + /** Represents a BatchCreateMetastorePartitionsRequest. 
*/ + class BatchCreateMetastorePartitionsRequest implements IBatchCreateMetastorePartitionsRequest { + + /** + * Constructs a new BatchCreateMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest); + + /** BatchCreateMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchCreateMetastorePartitionsRequest requests. */ + public requests: google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest[]; + + /** BatchCreateMetastorePartitionsRequest skipExistingPartitions. */ + public skipExistingPartitions: boolean; + + /** + * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.verify|verify} messages. 
+ * @param message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest; + + /** + * Verifies a BatchCreateMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns BatchCreateMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message BatchCreateMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateMetastorePartitionsResponse. */ + interface IBatchCreateMetastorePartitionsResponse { + + /** BatchCreateMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]|null); + } + + /** Represents a BatchCreateMetastorePartitionsResponse. */ + class BatchCreateMetastorePartitionsResponse implements IBatchCreateMetastorePartitionsResponse { + + /** + * Constructs a new BatchCreateMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse); + + /** BatchCreateMetastorePartitionsResponse partitions. */ + public partitions: google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]; + + /** + * Creates a new BatchCreateMetastorePartitionsResponse instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns BatchCreateMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse; + + /** + * Verifies a BatchCreateMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @param message BatchCreateMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchDeleteMetastorePartitionsRequest. */ + interface IBatchDeleteMetastorePartitionsRequest { + + /** BatchDeleteMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchDeleteMetastorePartitionsRequest partitionValues */ + partitionValues?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues[]|null); + } + + /** Represents a BatchDeleteMetastorePartitionsRequest. */ + class BatchDeleteMetastorePartitionsRequest implements IBatchDeleteMetastorePartitionsRequest { + + /** + * Constructs a new BatchDeleteMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest); + + /** BatchDeleteMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchDeleteMetastorePartitionsRequest partitionValues. */ + public partitionValues: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues[]; + + /** + * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns BatchDeleteMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest): google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest; + + /** + * Verifies a BatchDeleteMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchDeleteMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchDeleteMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchDeleteMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @param message BatchDeleteMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchDeleteMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchDeleteMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an UpdateMetastorePartitionRequest. */ + interface IUpdateMetastorePartitionRequest { + + /** UpdateMetastorePartitionRequest metastorePartition */ + metastorePartition?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null); + + /** UpdateMetastorePartitionRequest updateMask */ + updateMask?: (google.protobuf.IFieldMask|null); + } + + /** Represents an UpdateMetastorePartitionRequest. */ + class UpdateMetastorePartitionRequest implements IUpdateMetastorePartitionRequest { + + /** + * Constructs a new UpdateMetastorePartitionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest); + + /** UpdateMetastorePartitionRequest metastorePartition. */ + public metastorePartition?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null); + + /** UpdateMetastorePartitionRequest updateMask. */ + public updateMask?: (google.protobuf.IFieldMask|null); + + /** + * Creates a new UpdateMetastorePartitionRequest instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns UpdateMetastorePartitionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest): google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.verify|verify} messages. + * @param message UpdateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.verify|verify} messages. + * @param message UpdateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest; + + /** + * Verifies an UpdateMetastorePartitionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UpdateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns UpdateMetastorePartitionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest; + + /** + * Creates a plain object from an UpdateMetastorePartitionRequest message. Also converts values to other types if specified. + * @param message UpdateMetastorePartitionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UpdateMetastorePartitionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UpdateMetastorePartitionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchUpdateMetastorePartitionsRequest. 
*/ + interface IBatchUpdateMetastorePartitionsRequest { + + /** BatchUpdateMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchUpdateMetastorePartitionsRequest requests */ + requests?: (google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest[]|null); + } + + /** Represents a BatchUpdateMetastorePartitionsRequest. */ + class BatchUpdateMetastorePartitionsRequest implements IBatchUpdateMetastorePartitionsRequest { + + /** + * Constructs a new BatchUpdateMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest); + + /** BatchUpdateMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchUpdateMetastorePartitionsRequest requests. */ + public requests: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest[]; + + /** + * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchUpdateMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest; + + /** + * Verifies a BatchUpdateMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchUpdateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns BatchUpdateMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message BatchUpdateMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchUpdateMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchUpdateMetastorePartitionsResponse. */ + interface IBatchUpdateMetastorePartitionsResponse { + + /** BatchUpdateMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]|null); + } + + /** Represents a BatchUpdateMetastorePartitionsResponse. */ + class BatchUpdateMetastorePartitionsResponse implements IBatchUpdateMetastorePartitionsResponse { + + /** + * Constructs a new BatchUpdateMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse); + + /** BatchUpdateMetastorePartitionsResponse partitions. */ + public partitions: google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]; + + /** + * Creates a new BatchUpdateMetastorePartitionsResponse instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns BatchUpdateMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse; + + /** + * Verifies a BatchUpdateMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchUpdateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchUpdateMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @param message BatchUpdateMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchUpdateMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ListMetastorePartitionsRequest. */ + interface IListMetastorePartitionsRequest { + + /** ListMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** ListMetastorePartitionsRequest filter */ + filter?: (string|null); + } + + /** Represents a ListMetastorePartitionsRequest. */ + class ListMetastorePartitionsRequest implements IListMetastorePartitionsRequest { + + /** + * Constructs a new ListMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest); + + /** ListMetastorePartitionsRequest parent. */ + public parent: string; + + /** ListMetastorePartitionsRequest filter. */ + public filter: string; + + /** + * Creates a new ListMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ListMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest; + + /** + * Encodes the specified ListMetastorePartitionsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest.verify|verify} messages. + * @param message ListMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest.verify|verify} messages. + * @param message ListMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest; + + /** + * Verifies a ListMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest; + + /** + * Creates a plain object from a ListMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message ListMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ListMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ListMetastorePartitionsResponse. 
*/ + interface IListMetastorePartitionsResponse { + + /** ListMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList|null); + + /** ListMetastorePartitionsResponse streams */ + streams?: (google.cloud.bigquery.storage.v1alpha.IStreamList|null); + } + + /** Represents a ListMetastorePartitionsResponse. */ + class ListMetastorePartitionsResponse implements IListMetastorePartitionsResponse { + + /** + * Constructs a new ListMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse); + + /** ListMetastorePartitionsResponse partitions. */ + public partitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList|null); + + /** ListMetastorePartitionsResponse streams. */ + public streams?: (google.cloud.bigquery.storage.v1alpha.IStreamList|null); + + /** ListMetastorePartitionsResponse response. */ + public response?: ("partitions"|"streams"); + + /** + * Creates a new ListMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ListMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse; + + /** + * Encodes the specified ListMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.verify|verify} messages. 
+ * @param message ListMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.verify|verify} messages. + * @param message ListMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse; + + /** + * Verifies a ListMetastorePartitionsResponse message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse; + + /** + * Creates a plain object from a ListMetastorePartitionsResponse message. Also converts values to other types if specified. + * @param message ListMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ListMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamMetastorePartitionsRequest. */ + interface IStreamMetastorePartitionsRequest { + + /** StreamMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** StreamMetastorePartitionsRequest metastorePartitions */ + metastorePartitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]|null); + + /** StreamMetastorePartitionsRequest skipExistingPartitions */ + skipExistingPartitions?: (boolean|null); + } + + /** Represents a StreamMetastorePartitionsRequest. 
*/ + class StreamMetastorePartitionsRequest implements IStreamMetastorePartitionsRequest { + + /** + * Constructs a new StreamMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest); + + /** StreamMetastorePartitionsRequest parent. */ + public parent: string; + + /** StreamMetastorePartitionsRequest metastorePartitions. */ + public metastorePartitions: google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]; + + /** StreamMetastorePartitionsRequest skipExistingPartitions. */ + public skipExistingPartitions: boolean; + + /** + * Creates a new StreamMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.verify|verify} messages. + * @param message StreamMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.verify|verify} messages. 
+ * @param message StreamMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest; + + /** + * Verifies a StreamMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns StreamMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest; + + /** + * Creates a plain object from a StreamMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message StreamMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamMetastorePartitionsResponse. */ + interface IStreamMetastorePartitionsResponse { + + /** StreamMetastorePartitionsResponse totalPartitionsStreamedCount */ + totalPartitionsStreamedCount?: (number|Long|string|null); + + /** StreamMetastorePartitionsResponse totalPartitionsInsertedCount */ + totalPartitionsInsertedCount?: (number|Long|string|null); + } + + /** Represents a StreamMetastorePartitionsResponse. */ + class StreamMetastorePartitionsResponse implements IStreamMetastorePartitionsResponse { + + /** + * Constructs a new StreamMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse); + + /** StreamMetastorePartitionsResponse totalPartitionsStreamedCount. */ + public totalPartitionsStreamedCount: (number|Long|string); + + /** StreamMetastorePartitionsResponse totalPartitionsInsertedCount. 
*/ + public totalPartitionsInsertedCount: (number|Long|string); + + /** + * Creates a new StreamMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse.verify|verify} messages. + * @param message StreamMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse.verify|verify} messages. + * @param message StreamMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse; + + /** + * Verifies a StreamMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse; + + /** + * Creates a plain object from a StreamMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @param message StreamMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchSizeTooLargeError. */ + interface IBatchSizeTooLargeError { + + /** BatchSizeTooLargeError maxBatchSize */ + maxBatchSize?: (number|Long|string|null); + + /** BatchSizeTooLargeError errorMessage */ + errorMessage?: (string|null); + } + + /** Represents a BatchSizeTooLargeError. */ + class BatchSizeTooLargeError implements IBatchSizeTooLargeError { + + /** + * Constructs a new BatchSizeTooLargeError. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError); + + /** BatchSizeTooLargeError maxBatchSize. */ + public maxBatchSize: (number|Long|string); + + /** BatchSizeTooLargeError errorMessage. */ + public errorMessage: string; + + /** + * Creates a new BatchSizeTooLargeError instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchSizeTooLargeError instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError): google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError; + + /** + * Encodes the specified BatchSizeTooLargeError message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError.verify|verify} messages. 
+ * @param message BatchSizeTooLargeError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchSizeTooLargeError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError.verify|verify} messages. + * @param message BatchSizeTooLargeError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError; + + /** + * Verifies a BatchSizeTooLargeError message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchSizeTooLargeError message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchSizeTooLargeError + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError; + + /** + * Creates a plain object from a BatchSizeTooLargeError message. Also converts values to other types if specified. + * @param message BatchSizeTooLargeError + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchSizeTooLargeError to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchSizeTooLargeError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FieldSchema. */ + interface IFieldSchema { + + /** FieldSchema name */ + name?: (string|null); + + /** FieldSchema type */ + type?: (string|null); + } + + /** Represents a FieldSchema. */ + class FieldSchema implements IFieldSchema { + + /** + * Constructs a new FieldSchema. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IFieldSchema); + + /** FieldSchema name. */ + public name: string; + + /** FieldSchema type. */ + public type: string; + + /** + * Creates a new FieldSchema instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FieldSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IFieldSchema): google.cloud.bigquery.storage.v1alpha.FieldSchema; + + /** + * Encodes the specified FieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.FieldSchema.verify|verify} messages. + * @param message FieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.FieldSchema.verify|verify} messages. + * @param message FieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.FieldSchema; + + /** + * Decodes a FieldSchema message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.FieldSchema; + + /** + * Verifies a FieldSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.FieldSchema; + + /** + * Creates a plain object from a FieldSchema message. Also converts values to other types if specified. + * @param message FieldSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.FieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StorageDescriptor. 
*/ + interface IStorageDescriptor { + + /** StorageDescriptor locationUri */ + locationUri?: (string|null); + + /** StorageDescriptor inputFormat */ + inputFormat?: (string|null); + + /** StorageDescriptor outputFormat */ + outputFormat?: (string|null); + + /** StorageDescriptor serdeInfo */ + serdeInfo?: (google.cloud.bigquery.storage.v1alpha.ISerDeInfo|null); + } + + /** Represents a StorageDescriptor. */ + class StorageDescriptor implements IStorageDescriptor { + + /** + * Constructs a new StorageDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IStorageDescriptor); + + /** StorageDescriptor locationUri. */ + public locationUri: string; + + /** StorageDescriptor inputFormat. */ + public inputFormat: string; + + /** StorageDescriptor outputFormat. */ + public outputFormat: string; + + /** StorageDescriptor serdeInfo. */ + public serdeInfo?: (google.cloud.bigquery.storage.v1alpha.ISerDeInfo|null); + + /** + * Creates a new StorageDescriptor instance using the specified properties. + * @param [properties] Properties to set + * @returns StorageDescriptor instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IStorageDescriptor): google.cloud.bigquery.storage.v1alpha.StorageDescriptor; + + /** + * Encodes the specified StorageDescriptor message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StorageDescriptor.verify|verify} messages. + * @param message StorageDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IStorageDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StorageDescriptor message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StorageDescriptor.verify|verify} messages. 
+ * @param message StorageDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IStorageDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.StorageDescriptor; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.StorageDescriptor; + + /** + * Verifies a StorageDescriptor message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StorageDescriptor message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StorageDescriptor + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.StorageDescriptor; + + /** + * Creates a plain object from a StorageDescriptor message. Also converts values to other types if specified. 
+ * @param message StorageDescriptor + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.StorageDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StorageDescriptor to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StorageDescriptor + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SerDeInfo. */ + interface ISerDeInfo { + + /** SerDeInfo name */ + name?: (string|null); + + /** SerDeInfo serializationLibrary */ + serializationLibrary?: (string|null); + + /** SerDeInfo parameters */ + parameters?: ({ [k: string]: string }|null); + } + + /** Represents a SerDeInfo. */ + class SerDeInfo implements ISerDeInfo { + + /** + * Constructs a new SerDeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.ISerDeInfo); + + /** SerDeInfo name. */ + public name: string; + + /** SerDeInfo serializationLibrary. */ + public serializationLibrary: string; + + /** SerDeInfo parameters. */ + public parameters: { [k: string]: string }; + + /** + * Creates a new SerDeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SerDeInfo instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.ISerDeInfo): google.cloud.bigquery.storage.v1alpha.SerDeInfo; + + /** + * Encodes the specified SerDeInfo message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.SerDeInfo.verify|verify} messages. 
+ * @param message SerDeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.ISerDeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SerDeInfo message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.SerDeInfo.verify|verify} messages. + * @param message SerDeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.ISerDeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.SerDeInfo; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.SerDeInfo; + + /** + * Verifies a SerDeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SerDeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns SerDeInfo + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.SerDeInfo; + + /** + * Creates a plain object from a SerDeInfo message. Also converts values to other types if specified. + * @param message SerDeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.SerDeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SerDeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SerDeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartition. */ + interface IMetastorePartition { + + /** MetastorePartition values */ + values?: (string[]|null); + + /** MetastorePartition createTime */ + createTime?: (google.protobuf.ITimestamp|null); + + /** MetastorePartition storageDescriptor */ + storageDescriptor?: (google.cloud.bigquery.storage.v1alpha.IStorageDescriptor|null); + + /** MetastorePartition parameters */ + parameters?: ({ [k: string]: string }|null); + + /** MetastorePartition fields */ + fields?: (google.cloud.bigquery.storage.v1alpha.IFieldSchema[]|null); + } + + /** Represents a MetastorePartition. */ + class MetastorePartition implements IMetastorePartition { + + /** + * Constructs a new MetastorePartition. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartition); + + /** MetastorePartition values. */ + public values: string[]; + + /** MetastorePartition createTime. */ + public createTime?: (google.protobuf.ITimestamp|null); + + /** MetastorePartition storageDescriptor. 
*/ + public storageDescriptor?: (google.cloud.bigquery.storage.v1alpha.IStorageDescriptor|null); + + /** MetastorePartition parameters. */ + public parameters: { [k: string]: string }; + + /** MetastorePartition fields. */ + public fields: google.cloud.bigquery.storage.v1alpha.IFieldSchema[]; + + /** + * Creates a new MetastorePartition instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartition instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartition): google.cloud.bigquery.storage.v1alpha.MetastorePartition; + + /** + * Encodes the specified MetastorePartition message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify|verify} messages. + * @param message MetastorePartition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify|verify} messages. + * @param message MetastorePartition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.MetastorePartition; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.MetastorePartition; + + /** + * Verifies a MetastorePartition message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartition message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartition + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.MetastorePartition; + + /** + * Creates a plain object from a MetastorePartition message. Also converts values to other types if specified. + * @param message MetastorePartition + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.MetastorePartition, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartition to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartition + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartitionList. */ + interface IMetastorePartitionList { + + /** MetastorePartitionList partitions */ + partitions?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]|null); + } + + /** Represents a MetastorePartitionList. */ + class MetastorePartitionList implements IMetastorePartitionList { + + /** + * Constructs a new MetastorePartitionList. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList); + + /** MetastorePartitionList partitions. */ + public partitions: google.cloud.bigquery.storage.v1alpha.IMetastorePartition[]; + + /** + * Creates a new MetastorePartitionList instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartitionList instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList): google.cloud.bigquery.storage.v1alpha.MetastorePartitionList; + + /** + * Encodes the specified MetastorePartitionList message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.verify|verify} messages. + * @param message MetastorePartitionList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartitionList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.verify|verify} messages. 
+ * @param message MetastorePartitionList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.MetastorePartitionList; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.MetastorePartitionList; + + /** + * Verifies a MetastorePartitionList message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartitionList message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartitionList + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.MetastorePartitionList; + + /** + * Creates a plain object from a MetastorePartitionList message. Also converts values to other types if specified. 
+ * @param message MetastorePartitionList + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.MetastorePartitionList, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartitionList to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartitionList + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReadStream. */ + interface IReadStream { + + /** ReadStream name */ + name?: (string|null); + } + + /** Represents a ReadStream. */ + class ReadStream implements IReadStream { + + /** + * Constructs a new ReadStream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IReadStream); + + /** ReadStream name. */ + public name: string; + + /** + * Creates a new ReadStream instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadStream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IReadStream): google.cloud.bigquery.storage.v1alpha.ReadStream; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ReadStream.verify|verify} messages. 
+ * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.ReadStream; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.ReadStream; + + /** + * Verifies a ReadStream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadStream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.ReadStream; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. 
+ * @param message ReadStream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadStream to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamList. */ + interface IStreamList { + + /** StreamList streams */ + streams?: (google.cloud.bigquery.storage.v1alpha.IReadStream[]|null); + } + + /** Represents a StreamList. */ + class StreamList implements IStreamList { + + /** + * Constructs a new StreamList. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IStreamList); + + /** StreamList streams. */ + public streams: google.cloud.bigquery.storage.v1alpha.IReadStream[]; + + /** + * Creates a new StreamList instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamList instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IStreamList): google.cloud.bigquery.storage.v1alpha.StreamList; + + /** + * Encodes the specified StreamList message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamList.verify|verify} messages. + * @param message StreamList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IStreamList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamList message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamList.verify|verify} messages. + * @param message StreamList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IStreamList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamList message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.StreamList; + + /** + * Decodes a StreamList message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.StreamList; + + /** + * Verifies a StreamList message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamList message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamList + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.StreamList; + + /** + * Creates a plain object from a StreamList message. Also converts values to other types if specified. 
+ * @param message StreamList + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.StreamList, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamList to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamList + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartitionValues. */ + interface IMetastorePartitionValues { + + /** MetastorePartitionValues values */ + values?: (string[]|null); + } + + /** Represents a MetastorePartitionValues. */ + class MetastorePartitionValues implements IMetastorePartitionValues { + + /** + * Constructs a new MetastorePartitionValues. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues); + + /** MetastorePartitionValues values. */ + public values: string[]; + + /** + * Creates a new MetastorePartitionValues instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartitionValues instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues): google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues; + + /** + * Encodes the specified MetastorePartitionValues message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.verify|verify} messages. 
+ * @param message MetastorePartitionValues message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartitionValues message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.verify|verify} messages. + * @param message MetastorePartitionValues message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues; + + /** + * Verifies a MetastorePartitionValues message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartitionValues message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartitionValues + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues; + + /** + * Creates a plain object from a MetastorePartitionValues message. Also converts values to other types if specified. + * @param message MetastorePartitionValues + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartitionValues to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartitionValues + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Namespace v1beta1. */ namespace v1beta1 { @@ -12276,6 +14484,103 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + /** Properties of a FieldMask. */ + interface IFieldMask { + + /** FieldMask paths */ + paths?: (string[]|null); + } + + /** Represents a FieldMask. */ + class FieldMask implements IFieldMask { + + /** + * Constructs a new FieldMask. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.IFieldMask); + + /** FieldMask paths. */ + public paths: string[]; + + /** + * Creates a new FieldMask instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns FieldMask instance + */ + public static create(properties?: google.protobuf.IFieldMask): google.protobuf.FieldMask; + + /** + * Encodes the specified FieldMask message. Does not implicitly {@link google.protobuf.FieldMask.verify|verify} messages. + * @param message FieldMask message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.IFieldMask, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldMask message, length delimited. Does not implicitly {@link google.protobuf.FieldMask.verify|verify} messages. + * @param message FieldMask message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.IFieldMask, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldMask message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldMask; + + /** + * Decodes a FieldMask message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FieldMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldMask; + + /** + * Verifies a FieldMask message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldMask message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldMask + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldMask; + + /** + * Creates a plain object from a FieldMask message. Also converts values to other types if specified. + * @param message FieldMask + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldMask, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldMask to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldMask + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } /** Namespace api. */ diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 360d687ad51..208e6c163b8 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -11221,6 +11221,5202 @@ return v1; })(); + storage.v1alpha = (function() { + + /** + * Namespace v1alpha. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1alpha = {}; + + v1alpha.MetastorePartitionService = (function() { + + /** + * Constructs a new MetastorePartitionService service. 
+ * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a MetastorePartitionService + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function MetastorePartitionService(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (MetastorePartitionService.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = MetastorePartitionService; + + /** + * Creates new MetastorePartitionService service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {MetastorePartitionService} RPC service. Useful where requests and/or responses are streamed. + */ + MetastorePartitionService.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchCreateMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @typedef BatchCreateMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} [response] BatchCreateMetastorePartitionsResponse + */ + + /** + * Calls BatchCreateMetastorePartitions. 
+ * @function batchCreateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest} request BatchCreateMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and BatchCreateMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchCreateMetastorePartitions = function batchCreateMetastorePartitions(request, callback) { + return this.rpcCall(batchCreateMetastorePartitions, $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse, request, callback); + }, "name", { value: "BatchCreateMetastorePartitions" }); + + /** + * Calls BatchCreateMetastorePartitions. + * @function batchCreateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest} request BatchCreateMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchDeleteMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @typedef BatchDeleteMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.protobuf.Empty} [response] Empty + */ + + /** + * Calls BatchDeleteMetastorePartitions. 
+ * @function batchDeleteMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest} request BatchDeleteMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and Empty + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchDeleteMetastorePartitions = function batchDeleteMetastorePartitions(request, callback) { + return this.rpcCall(batchDeleteMetastorePartitions, $root.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest, $root.google.protobuf.Empty, request, callback); + }, "name", { value: "BatchDeleteMetastorePartitions" }); + + /** + * Calls BatchDeleteMetastorePartitions. + * @function batchDeleteMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest} request BatchDeleteMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|batchUpdateMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @typedef BatchUpdateMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} [response] BatchUpdateMetastorePartitionsResponse + */ + + /** + * Calls BatchUpdateMetastorePartitions. 
+ * @function batchUpdateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest} request BatchUpdateMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and BatchUpdateMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchUpdateMetastorePartitions = function batchUpdateMetastorePartitions(request, callback) { + return this.rpcCall(batchUpdateMetastorePartitions, $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse, request, callback); + }, "name", { value: "BatchUpdateMetastorePartitions" }); + + /** + * Calls BatchUpdateMetastorePartitions. + * @function batchUpdateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest} request BatchUpdateMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|listMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @typedef ListMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} [response] ListMetastorePartitionsResponse + */ + + /** + * Calls ListMetastorePartitions. 
+ * @function listMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest} request ListMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and ListMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.listMetastorePartitions = function listMetastorePartitions(request, callback) { + return this.rpcCall(listMetastorePartitions, $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse, request, callback); + }, "name", { value: "ListMetastorePartitions" }); + + /** + * Calls ListMetastorePartitions. + * @function listMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest} request ListMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionService|streamMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @typedef StreamMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} [response] StreamMetastorePartitionsResponse + */ + + /** + * Calls StreamMetastorePartitions. 
+ * @function streamMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest} request StreamMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and StreamMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.streamMetastorePartitions = function streamMetastorePartitions(request, callback) { + return this.rpcCall(streamMetastorePartitions, $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse, request, callback); + }, "name", { value: "StreamMetastorePartitions" }); + + /** + * Calls StreamMetastorePartitions. + * @function streamMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest} request StreamMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return MetastorePartitionService; + })(); + + v1alpha.CreateMetastorePartitionRequest = (function() { + + /** + * Properties of a CreateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface ICreateMetastorePartitionRequest + * @property {string|null} [parent] CreateMetastorePartitionRequest parent + * @property {google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null} [metastorePartition] CreateMetastorePartitionRequest metastorePartition + */ + + /** + * Constructs a new CreateMetastorePartitionRequest. 
+ * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a CreateMetastorePartitionRequest. + * @implements ICreateMetastorePartitionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest=} [properties] Properties to set + */ + function CreateMetastorePartitionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateMetastorePartitionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @instance + */ + CreateMetastorePartitionRequest.prototype.parent = ""; + + /** + * CreateMetastorePartitionRequest metastorePartition. + * @member {google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null|undefined} metastorePartition + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @instance + */ + CreateMetastorePartitionRequest.prototype.metastorePartition = null; + + /** + * Creates a new CreateMetastorePartitionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest instance + */ + CreateMetastorePartitionRequest.create = function create(properties) { + return new CreateMetastorePartitionRequest(properties); + }; + + /** + * Encodes the specified CreateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest} message CreateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateMetastorePartitionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.metastorePartition != null && Object.hasOwnProperty.call(message, "metastorePartition")) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.metastorePartition, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified CreateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ICreateMetastorePartitionRequest} message CreateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateMetastorePartitionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateMetastorePartitionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateMetastorePartitionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateMetastorePartitionRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateMetastorePartitionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.metastorePartition); + if (error) + return "metastorePartition." + error; + } + return null; + }; + + /** + * Creates a CreateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + */ + CreateMetastorePartitionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.metastorePartition != null) { + if (typeof object.metastorePartition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.metastorePartition: object expected"); + message.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.metastorePartition); + } + return message; + }; + + /** + * Creates a plain object from a CreateMetastorePartitionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest} message CreateMetastorePartitionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateMetastorePartitionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.metastorePartition = null; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) + object.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.metastorePartition, options); + return object; + }; + + /** + * Converts this CreateMetastorePartitionRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateMetastorePartitionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CreateMetastorePartitionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateMetastorePartitionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest"; + }; + + return CreateMetastorePartitionRequest; + })(); + + 
v1alpha.BatchCreateMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchCreateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchCreateMetastorePartitionsRequest + * @property {string|null} [parent] BatchCreateMetastorePartitionsRequest parent + * @property {Array.|null} [requests] BatchCreateMetastorePartitionsRequest requests + * @property {boolean|null} [skipExistingPartitions] BatchCreateMetastorePartitionsRequest skipExistingPartitions + */ + + /** + * Constructs a new BatchCreateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchCreateMetastorePartitionsRequest. + * @implements IBatchCreateMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchCreateMetastorePartitionsRequest(properties) { + this.requests = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchCreateMetastorePartitionsRequest requests. + * @member {Array.} requests + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.requests = $util.emptyArray; + + /** + * BatchCreateMetastorePartitionsRequest skipExistingPartitions. 
+ * @member {boolean} skipExistingPartitions + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.skipExistingPartitions = false; + + /** + * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest instance + */ + BatchCreateMetastorePartitionsRequest.create = function create(properties) { + return new BatchCreateMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.requests != null && message.requests.length) + for (var i = 0; i < message.requests.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.skipExistingPartitions != null && Object.hasOwnProperty.call(message, "skipExistingPartitions")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.skipExistingPartitions); + return writer; + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.requests && message.requests.length)) + message.requests = []; + message.requests.push($root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.decode(reader, reader.uint32())); + break; + } + case 3: { + message.skipExistingPartitions = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.requests != null && message.hasOwnProperty("requests")) { + if (!Array.isArray(message.requests)) + return "requests: array expected"; + for (var i = 0; i < message.requests.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.verify(message.requests[i]); + if (error) + return "requests." + error; + } + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + if (typeof message.skipExistingPartitions !== "boolean") + return "skipExistingPartitions: boolean expected"; + return null; + }; + + /** + * Creates a BatchCreateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + */ + BatchCreateMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.requests) { + if (!Array.isArray(object.requests)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.requests: array expected"); + message.requests = []; + for (var i = 0; i < object.requests.length; ++i) { + if (typeof object.requests[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest.requests: object expected"); + message.requests[i] = $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.fromObject(object.requests[i]); + } + } + if (object.skipExistingPartitions != null) + message.skipExistingPartitions = Boolean(object.skipExistingPartitions); + return message; + }; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.requests = []; + if (options.defaults) { + object.parent = ""; + object.skipExistingPartitions = false; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.requests && message.requests.length) { + object.requests = []; + for (var j = 0; j < message.requests.length; ++j) + object.requests[j] = $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.toObject(message.requests[j], options); + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + object.skipExistingPartitions = message.skipExistingPartitions; + return object; + }; + + /** + * Converts this BatchCreateMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCreateMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest"; + }; + + return BatchCreateMetastorePartitionsRequest; + })(); + + v1alpha.BatchCreateMetastorePartitionsResponse = (function() { + + /** + * Properties of a BatchCreateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchCreateMetastorePartitionsResponse + * @property {Array.|null} [partitions] BatchCreateMetastorePartitionsResponse partitions + */ + + /** + * Constructs a new BatchCreateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchCreateMetastorePartitionsResponse. 
+ * @implements IBatchCreateMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse=} [properties] Properties to set + */ + function BatchCreateMetastorePartitionsResponse(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateMetastorePartitionsResponse partitions. + * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @instance + */ + BatchCreateMetastorePartitionsResponse.prototype.partitions = $util.emptyArray; + + /** + * Creates a new BatchCreateMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse instance + */ + BatchCreateMetastorePartitionsResponse.create = function create(properties) { + return new BatchCreateMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a BatchCreateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + */ + BatchCreateMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this BatchCreateMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCreateMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse"; + }; + + return BatchCreateMetastorePartitionsResponse; + })(); + + 
v1alpha.BatchDeleteMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchDeleteMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchDeleteMetastorePartitionsRequest + * @property {string|null} [parent] BatchDeleteMetastorePartitionsRequest parent + * @property {Array.|null} [partitionValues] BatchDeleteMetastorePartitionsRequest partitionValues + */ + + /** + * Constructs a new BatchDeleteMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchDeleteMetastorePartitionsRequest. + * @implements IBatchDeleteMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchDeleteMetastorePartitionsRequest(properties) { + this.partitionValues = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchDeleteMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchDeleteMetastorePartitionsRequest partitionValues. + * @member {Array.} partitionValues + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.partitionValues = $util.emptyArray; + + /** + * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest instance + */ + BatchDeleteMetastorePartitionsRequest.create = function create(properties) { + return new BatchDeleteMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchDeleteMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.partitionValues != null && message.partitionValues.length) + for (var i = 0; i < message.partitionValues.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.encode(message.partitionValues[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchDeleteMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchDeleteMetastorePartitionsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.partitionValues && message.partitionValues.length)) + message.partitionValues = []; + message.partitionValues.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchDeleteMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchDeleteMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchDeleteMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.partitionValues != null && message.hasOwnProperty("partitionValues")) { + if (!Array.isArray(message.partitionValues)) + return "partitionValues: array expected"; + for (var i = 0; i < message.partitionValues.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.verify(message.partitionValues[i]); + if (error) + return "partitionValues." + error; + } + } + return null; + }; + + /** + * Creates a BatchDeleteMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + */ + BatchDeleteMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.partitionValues) { + if (!Array.isArray(object.partitionValues)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.partitionValues: array expected"); + message.partitionValues = []; + for (var i = 0; i < object.partitionValues.length; ++i) { + if (typeof object.partitionValues[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest.partitionValues: object expected"); + message.partitionValues[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.fromObject(object.partitionValues[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchDeleteMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchDeleteMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitionValues = []; + if (options.defaults) + object.parent = ""; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.partitionValues && message.partitionValues.length) { + object.partitionValues = []; + for (var j = 0; j < message.partitionValues.length; ++j) + object.partitionValues[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.toObject(message.partitionValues[j], options); + } + return object; + }; + + /** + * Converts this BatchDeleteMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchDeleteMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchDeleteMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchDeleteMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest"; + }; + + return BatchDeleteMetastorePartitionsRequest; + })(); + + v1alpha.UpdateMetastorePartitionRequest = (function() { + + /** + * Properties of an UpdateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IUpdateMetastorePartitionRequest + * @property {google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null} [metastorePartition] UpdateMetastorePartitionRequest metastorePartition + * @property {google.protobuf.IFieldMask|null} [updateMask] UpdateMetastorePartitionRequest updateMask + */ + + /** + * Constructs a new UpdateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents an UpdateMetastorePartitionRequest. 
+ * @implements IUpdateMetastorePartitionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest=} [properties] Properties to set + */ + function UpdateMetastorePartitionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UpdateMetastorePartitionRequest metastorePartition. + * @member {google.cloud.bigquery.storage.v1alpha.IMetastorePartition|null|undefined} metastorePartition + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @instance + */ + UpdateMetastorePartitionRequest.prototype.metastorePartition = null; + + /** + * UpdateMetastorePartitionRequest updateMask. + * @member {google.protobuf.IFieldMask|null|undefined} updateMask + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @instance + */ + UpdateMetastorePartitionRequest.prototype.updateMask = null; + + /** + * Creates a new UpdateMetastorePartitionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest instance + */ + UpdateMetastorePartitionRequest.create = function create(properties) { + return new UpdateMetastorePartitionRequest(properties); + }; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UpdateMetastorePartitionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.metastorePartition != null && Object.hasOwnProperty.call(message, "metastorePartition")) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.metastorePartition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.updateMask != null && Object.hasOwnProperty.call(message, "updateMask")) + $root.google.protobuf.FieldMask.encode(message.updateMask, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UpdateMetastorePartitionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UpdateMetastorePartitionRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32()); + break; + } + case 2: { + message.updateMask = $root.google.protobuf.FieldMask.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UpdateMetastorePartitionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an UpdateMetastorePartitionRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UpdateMetastorePartitionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.metastorePartition); + if (error) + return "metastorePartition." + error; + } + if (message.updateMask != null && message.hasOwnProperty("updateMask")) { + var error = $root.google.protobuf.FieldMask.verify(message.updateMask); + if (error) + return "updateMask." + error; + } + return null; + }; + + /** + * Creates an UpdateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + */ + UpdateMetastorePartitionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest(); + if (object.metastorePartition != null) { + if (typeof object.metastorePartition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.metastorePartition: object expected"); + message.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.metastorePartition); + } + if (object.updateMask != null) { + if (typeof object.updateMask !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.updateMask: object expected"); + message.updateMask = $root.google.protobuf.FieldMask.fromObject(object.updateMask); + } + return message; + }; + + /** + * Creates a plain object from an UpdateMetastorePartitionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UpdateMetastorePartitionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.metastorePartition = null; + object.updateMask = null; + } + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) + object.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.metastorePartition, options); + if (message.updateMask != null && message.hasOwnProperty("updateMask")) + object.updateMask = $root.google.protobuf.FieldMask.toObject(message.updateMask, options); + return object; + }; + + /** + * Converts this UpdateMetastorePartitionRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @instance + * @returns {Object.} JSON object + */ + UpdateMetastorePartitionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for UpdateMetastorePartitionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UpdateMetastorePartitionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest"; + }; + + return UpdateMetastorePartitionRequest; + })(); + + v1alpha.BatchUpdateMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchUpdateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchUpdateMetastorePartitionsRequest + * @property {string|null} [parent] BatchUpdateMetastorePartitionsRequest parent + * @property {Array.|null} [requests] BatchUpdateMetastorePartitionsRequest requests + */ + + /** + * Constructs a new BatchUpdateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchUpdateMetastorePartitionsRequest. 
+ * @implements IBatchUpdateMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchUpdateMetastorePartitionsRequest(properties) { + this.requests = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchUpdateMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchUpdateMetastorePartitionsRequest requests. + * @member {Array.} requests + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.requests = $util.emptyArray; + + /** + * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest instance + */ + BatchUpdateMetastorePartitionsRequest.create = function create(properties) { + return new BatchUpdateMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.requests != null && message.requests.length) + for (var i = 0; i < message.requests.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.requests && message.requests.length)) + message.requests = []; + message.requests.push($root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchUpdateMetastorePartitionsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchUpdateMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.requests != null && message.hasOwnProperty("requests")) { + if (!Array.isArray(message.requests)) + return "requests: array expected"; + for (var i = 0; i < message.requests.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.verify(message.requests[i]); + if (error) + return "requests." + error; + } + } + return null; + }; + + /** + * Creates a BatchUpdateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + */ + BatchUpdateMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.requests) { + if (!Array.isArray(object.requests)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.requests: array expected"); + message.requests = []; + for (var i = 0; i < object.requests.length; ++i) { + if (typeof object.requests[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest.requests: object expected"); + message.requests[i] = $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.fromObject(object.requests[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchUpdateMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.requests = []; + if (options.defaults) + object.parent = ""; + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.requests && message.requests.length) { + object.requests = []; + for (var j = 0; j < message.requests.length; ++j) + object.requests[j] = $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.toObject(message.requests[j], options); + } + return object; + }; + + /** + * Converts this BatchUpdateMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchUpdateMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchUpdateMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest"; + }; + + return BatchUpdateMetastorePartitionsRequest; + })(); + + v1alpha.BatchUpdateMetastorePartitionsResponse = (function() { + + /** + * Properties of a BatchUpdateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchUpdateMetastorePartitionsResponse + * @property {Array.|null} [partitions] BatchUpdateMetastorePartitionsResponse partitions + */ + + /** + * Constructs a new BatchUpdateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchUpdateMetastorePartitionsResponse. 
+ * @implements IBatchUpdateMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse=} [properties] Properties to set + */ + function BatchUpdateMetastorePartitionsResponse(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchUpdateMetastorePartitionsResponse partitions. + * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @instance + */ + BatchUpdateMetastorePartitionsResponse.prototype.partitions = $util.emptyArray; + + /** + * Creates a new BatchUpdateMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse instance + */ + BatchUpdateMetastorePartitionsResponse.create = function create(properties) { + return new BatchUpdateMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchUpdateMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchUpdateMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a BatchUpdateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + */ + BatchUpdateMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchUpdateMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this BatchUpdateMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchUpdateMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchUpdateMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse"; + }; + + return BatchUpdateMetastorePartitionsResponse; + })(); + + 
v1alpha.ListMetastorePartitionsRequest = (function() { + + /** + * Properties of a ListMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IListMetastorePartitionsRequest + * @property {string|null} [parent] ListMetastorePartitionsRequest parent + * @property {string|null} [filter] ListMetastorePartitionsRequest filter + */ + + /** + * Constructs a new ListMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a ListMetastorePartitionsRequest. + * @implements IListMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest=} [properties] Properties to set + */ + function ListMetastorePartitionsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.parent = ""; + + /** + * ListMetastorePartitionsRequest filter. + * @member {string} filter + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.filter = ""; + + /** + * Creates a new ListMetastorePartitionsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest instance + */ + ListMetastorePartitionsRequest.create = function create(properties) { + return new ListMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified ListMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest} message ListMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.filter != null && Object.hasOwnProperty.call(message, "filter")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.filter); + return writer; + }; + + /** + * Encodes the specified ListMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest} message ListMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.filter = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListMetastorePartitionsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.filter != null && message.hasOwnProperty("filter")) + if (!$util.isString(message.filter)) + return "filter: string expected"; + return null; + }; + + /** + * Creates a ListMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + */ + ListMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.filter != null) + message.filter = String(object.filter); + return message; + }; + + /** + * Creates a plain object from a ListMetastorePartitionsRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest} message ListMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.filter = ""; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.filter != null && message.hasOwnProperty("filter")) + object.filter = message.filter; + return object; + }; + + /** + * Converts this ListMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + ListMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest"; + }; + + return ListMetastorePartitionsRequest; + })(); + + v1alpha.ListMetastorePartitionsResponse = (function() { + + /** + * Properties of a ListMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IListMetastorePartitionsResponse + * @property {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList|null} [partitions] ListMetastorePartitionsResponse partitions + * @property {google.cloud.bigquery.storage.v1alpha.IStreamList|null} [streams] ListMetastorePartitionsResponse streams + */ + + /** + * Constructs a new ListMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a ListMetastorePartitionsResponse. 
+ * @implements IListMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse=} [properties] Properties to set + */ + function ListMetastorePartitionsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListMetastorePartitionsResponse partitions. + * @member {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList|null|undefined} partitions + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @instance + */ + ListMetastorePartitionsResponse.prototype.partitions = null; + + /** + * ListMetastorePartitionsResponse streams. + * @member {google.cloud.bigquery.storage.v1alpha.IStreamList|null|undefined} streams + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @instance + */ + ListMetastorePartitionsResponse.prototype.streams = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ListMetastorePartitionsResponse response. + * @member {"partitions"|"streams"|undefined} response + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @instance + */ + Object.defineProperty(ListMetastorePartitionsResponse.prototype, "response", { + get: $util.oneOfGetter($oneOfFields = ["partitions", "streams"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ListMetastorePartitionsResponse instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse instance + */ + ListMetastorePartitionsResponse.create = function create(properties) { + return new ListMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified ListMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse} message ListMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && Object.hasOwnProperty.call(message, "partitions")) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.encode(message.partitions, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.streams != null && Object.hasOwnProperty.call(message, "streams")) + $root.google.cloud.bigquery.storage.v1alpha.StreamList.encode(message.streams, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ListMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse} message ListMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.partitions = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.decode(reader, reader.uint32()); + break; + } + case 2: { + message.streams = $root.google.cloud.bigquery.storage.v1alpha.StreamList.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListMetastorePartitionsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.verify(message.partitions); + if (error) + return "partitions." + error; + } + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (properties.response === 1) + return "response: multiple values"; + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1alpha.StreamList.verify(message.streams); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a ListMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + */ + ListMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse(); + if (object.partitions != null) { + if (typeof object.partitions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.partitions: object expected"); + message.partitions = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.fromObject(object.partitions); + } + if (object.streams != null) { + if (typeof object.streams !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse.streams: object expected"); + message.streams = $root.google.cloud.bigquery.storage.v1alpha.StreamList.fromObject(object.streams); + } + return message; + }; + + /** + * Creates a plain object from a ListMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse} message ListMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + object.partitions = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.toObject(message.partitions, options); + if (options.oneofs) + object.response = "partitions"; + } + if (message.streams != null && message.hasOwnProperty("streams")) { + object.streams = $root.google.cloud.bigquery.storage.v1alpha.StreamList.toObject(message.streams, options); + if (options.oneofs) + object.response = "streams"; + } + return object; + }; + + /** + * Converts this ListMetastorePartitionsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + ListMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse"; + }; + + return ListMetastorePartitionsResponse; + })(); + + v1alpha.StreamMetastorePartitionsRequest = (function() { + + /** + * Properties of a StreamMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IStreamMetastorePartitionsRequest + * @property {string|null} [parent] StreamMetastorePartitionsRequest parent + * @property {Array.|null} [metastorePartitions] StreamMetastorePartitionsRequest metastorePartitions + * @property {boolean|null} [skipExistingPartitions] StreamMetastorePartitionsRequest skipExistingPartitions + */ + + /** + * Constructs a new StreamMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a StreamMetastorePartitionsRequest. 
+ * @implements IStreamMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest=} [properties] Properties to set + */ + function StreamMetastorePartitionsRequest(properties) { + this.metastorePartitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.parent = ""; + + /** + * StreamMetastorePartitionsRequest metastorePartitions. + * @member {Array.} metastorePartitions + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.metastorePartitions = $util.emptyArray; + + /** + * StreamMetastorePartitionsRequest skipExistingPartitions. + * @member {boolean} skipExistingPartitions + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.skipExistingPartitions = false; + + /** + * Creates a new StreamMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest instance + */ + StreamMetastorePartitionsRequest.create = function create(properties) { + return new StreamMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.metastorePartitions != null && message.metastorePartitions.length) + for (var i = 0; i < message.metastorePartitions.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.metastorePartitions[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.skipExistingPartitions != null && Object.hasOwnProperty.call(message, "skipExistingPartitions")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.skipExistingPartitions); + return writer; + }; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.metastorePartitions && message.metastorePartitions.length)) + message.metastorePartitions = []; + message.metastorePartitions.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32())); + break; + } + case 3: { + message.skipExistingPartitions = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.metastorePartitions != null && message.hasOwnProperty("metastorePartitions")) { + if (!Array.isArray(message.metastorePartitions)) + return "metastorePartitions: array expected"; + for (var i = 0; i < message.metastorePartitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.metastorePartitions[i]); + if (error) + return "metastorePartitions." + error; + } + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + if (typeof message.skipExistingPartitions !== "boolean") + return "skipExistingPartitions: boolean expected"; + return null; + }; + + /** + * Creates a StreamMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + */ + StreamMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.metastorePartitions) { + if (!Array.isArray(object.metastorePartitions)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.metastorePartitions: array expected"); + message.metastorePartitions = []; + for (var i = 0; i < object.metastorePartitions.length; ++i) { + if (typeof object.metastorePartitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest.metastorePartitions: object expected"); + message.metastorePartitions[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.metastorePartitions[i]); + } + } + if (object.skipExistingPartitions != null) + message.skipExistingPartitions = Boolean(object.skipExistingPartitions); + return message; + }; + + /** + * Creates a plain object from a StreamMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.metastorePartitions = []; + if (options.defaults) { + object.parent = ""; + object.skipExistingPartitions = false; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.metastorePartitions && message.metastorePartitions.length) { + object.metastorePartitions = []; + for (var j = 0; j < message.metastorePartitions.length; ++j) + object.metastorePartitions[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.metastorePartitions[j], options); + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + object.skipExistingPartitions = message.skipExistingPartitions; + return object; + }; + + /** + * Converts this StreamMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + StreamMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest"; + }; + + return StreamMetastorePartitionsRequest; + })(); + + v1alpha.StreamMetastorePartitionsResponse = (function() { + + /** + * Properties of a StreamMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IStreamMetastorePartitionsResponse + * @property {number|Long|null} [totalPartitionsStreamedCount] StreamMetastorePartitionsResponse totalPartitionsStreamedCount + * @property {number|Long|null} [totalPartitionsInsertedCount] StreamMetastorePartitionsResponse totalPartitionsInsertedCount + */ + + /** + * Constructs a new StreamMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a StreamMetastorePartitionsResponse. 
+ * @implements IStreamMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse=} [properties] Properties to set + */ + function StreamMetastorePartitionsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamMetastorePartitionsResponse totalPartitionsStreamedCount. + * @member {number|Long} totalPartitionsStreamedCount + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @instance + */ + StreamMetastorePartitionsResponse.prototype.totalPartitionsStreamedCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * StreamMetastorePartitionsResponse totalPartitionsInsertedCount. + * @member {number|Long} totalPartitionsInsertedCount + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @instance + */ + StreamMetastorePartitionsResponse.prototype.totalPartitionsInsertedCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new StreamMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse instance + */ + StreamMetastorePartitionsResponse.create = function create(properties) { + return new StreamMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.totalPartitionsStreamedCount != null && Object.hasOwnProperty.call(message, "totalPartitionsStreamedCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.totalPartitionsStreamedCount); + if (message.totalPartitionsInsertedCount != null && Object.hasOwnProperty.call(message, "totalPartitionsInsertedCount")) + writer.uint32(/* id 3, wireType 0 =*/24).int64(message.totalPartitionsInsertedCount); + return writer; + }; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + message.totalPartitionsStreamedCount = reader.int64(); + break; + } + case 3: { + message.totalPartitionsInsertedCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.totalPartitionsStreamedCount != null && message.hasOwnProperty("totalPartitionsStreamedCount")) + if (!$util.isInteger(message.totalPartitionsStreamedCount) && !(message.totalPartitionsStreamedCount && $util.isInteger(message.totalPartitionsStreamedCount.low) && $util.isInteger(message.totalPartitionsStreamedCount.high))) + return "totalPartitionsStreamedCount: integer|Long expected"; + if (message.totalPartitionsInsertedCount != null && message.hasOwnProperty("totalPartitionsInsertedCount")) + if (!$util.isInteger(message.totalPartitionsInsertedCount) && !(message.totalPartitionsInsertedCount && $util.isInteger(message.totalPartitionsInsertedCount.low) && $util.isInteger(message.totalPartitionsInsertedCount.high))) + return "totalPartitionsInsertedCount: integer|Long expected"; + return null; + }; + + /** + * 
Creates a StreamMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + */ + StreamMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse(); + if (object.totalPartitionsStreamedCount != null) + if ($util.Long) + (message.totalPartitionsStreamedCount = $util.Long.fromValue(object.totalPartitionsStreamedCount)).unsigned = false; + else if (typeof object.totalPartitionsStreamedCount === "string") + message.totalPartitionsStreamedCount = parseInt(object.totalPartitionsStreamedCount, 10); + else if (typeof object.totalPartitionsStreamedCount === "number") + message.totalPartitionsStreamedCount = object.totalPartitionsStreamedCount; + else if (typeof object.totalPartitionsStreamedCount === "object") + message.totalPartitionsStreamedCount = new $util.LongBits(object.totalPartitionsStreamedCount.low >>> 0, object.totalPartitionsStreamedCount.high >>> 0).toNumber(); + if (object.totalPartitionsInsertedCount != null) + if ($util.Long) + (message.totalPartitionsInsertedCount = $util.Long.fromValue(object.totalPartitionsInsertedCount)).unsigned = false; + else if (typeof object.totalPartitionsInsertedCount === "string") + message.totalPartitionsInsertedCount = parseInt(object.totalPartitionsInsertedCount, 10); + else if (typeof object.totalPartitionsInsertedCount === "number") + message.totalPartitionsInsertedCount = object.totalPartitionsInsertedCount; + else if (typeof object.totalPartitionsInsertedCount 
=== "object") + message.totalPartitionsInsertedCount = new $util.LongBits(object.totalPartitionsInsertedCount.low >>> 0, object.totalPartitionsInsertedCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a StreamMetastorePartitionsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.totalPartitionsStreamedCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.totalPartitionsStreamedCount = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.totalPartitionsInsertedCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.totalPartitionsInsertedCount = options.longs === String ? "0" : 0; + } + if (message.totalPartitionsStreamedCount != null && message.hasOwnProperty("totalPartitionsStreamedCount")) + if (typeof message.totalPartitionsStreamedCount === "number") + object.totalPartitionsStreamedCount = options.longs === String ? String(message.totalPartitionsStreamedCount) : message.totalPartitionsStreamedCount; + else + object.totalPartitionsStreamedCount = options.longs === String ? $util.Long.prototype.toString.call(message.totalPartitionsStreamedCount) : options.longs === Number ? 
new $util.LongBits(message.totalPartitionsStreamedCount.low >>> 0, message.totalPartitionsStreamedCount.high >>> 0).toNumber() : message.totalPartitionsStreamedCount; + if (message.totalPartitionsInsertedCount != null && message.hasOwnProperty("totalPartitionsInsertedCount")) + if (typeof message.totalPartitionsInsertedCount === "number") + object.totalPartitionsInsertedCount = options.longs === String ? String(message.totalPartitionsInsertedCount) : message.totalPartitionsInsertedCount; + else + object.totalPartitionsInsertedCount = options.longs === String ? $util.Long.prototype.toString.call(message.totalPartitionsInsertedCount) : options.longs === Number ? new $util.LongBits(message.totalPartitionsInsertedCount.low >>> 0, message.totalPartitionsInsertedCount.high >>> 0).toNumber() : message.totalPartitionsInsertedCount; + return object; + }; + + /** + * Converts this StreamMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + StreamMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse"; + }; + + return StreamMetastorePartitionsResponse; + })(); + + v1alpha.BatchSizeTooLargeError = (function() { + + /** + * Properties of a 
BatchSizeTooLargeError. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IBatchSizeTooLargeError + * @property {number|Long|null} [maxBatchSize] BatchSizeTooLargeError maxBatchSize + * @property {string|null} [errorMessage] BatchSizeTooLargeError errorMessage + */ + + /** + * Constructs a new BatchSizeTooLargeError. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a BatchSizeTooLargeError. + * @implements IBatchSizeTooLargeError + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError=} [properties] Properties to set + */ + function BatchSizeTooLargeError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchSizeTooLargeError maxBatchSize. + * @member {number|Long} maxBatchSize + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @instance + */ + BatchSizeTooLargeError.prototype.maxBatchSize = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * BatchSizeTooLargeError errorMessage. + * @member {string} errorMessage + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @instance + */ + BatchSizeTooLargeError.prototype.errorMessage = ""; + + /** + * Creates a new BatchSizeTooLargeError instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError} BatchSizeTooLargeError instance + */ + BatchSizeTooLargeError.create = function create(properties) { + return new BatchSizeTooLargeError(properties); + }; + + /** + * Encodes the specified BatchSizeTooLargeError message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError} message BatchSizeTooLargeError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchSizeTooLargeError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.maxBatchSize != null && Object.hasOwnProperty.call(message, "maxBatchSize")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.maxBatchSize); + if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.errorMessage); + return writer; + }; + + /** + * Encodes the specified BatchSizeTooLargeError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IBatchSizeTooLargeError} message BatchSizeTooLargeError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchSizeTooLargeError.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError} BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchSizeTooLargeError.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.maxBatchSize = reader.int64(); + break; + } + case 2: { + message.errorMessage = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError} BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchSizeTooLargeError.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchSizeTooLargeError message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchSizeTooLargeError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.maxBatchSize != null && message.hasOwnProperty("maxBatchSize")) + if (!$util.isInteger(message.maxBatchSize) && !(message.maxBatchSize && $util.isInteger(message.maxBatchSize.low) && $util.isInteger(message.maxBatchSize.high))) + return "maxBatchSize: integer|Long expected"; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + if (!$util.isString(message.errorMessage)) + return "errorMessage: string expected"; + return null; + }; + + /** + * Creates a BatchSizeTooLargeError message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError} BatchSizeTooLargeError + */ + BatchSizeTooLargeError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError(); + if (object.maxBatchSize != null) + if ($util.Long) + (message.maxBatchSize = $util.Long.fromValue(object.maxBatchSize)).unsigned = false; + else if (typeof object.maxBatchSize === "string") + message.maxBatchSize = parseInt(object.maxBatchSize, 10); + else if (typeof object.maxBatchSize === "number") + message.maxBatchSize = object.maxBatchSize; + else if (typeof object.maxBatchSize === "object") + message.maxBatchSize = new $util.LongBits(object.maxBatchSize.low >>> 0, 
object.maxBatchSize.high >>> 0).toNumber(); + if (object.errorMessage != null) + message.errorMessage = String(object.errorMessage); + return message; + }; + + /** + * Creates a plain object from a BatchSizeTooLargeError message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError} message BatchSizeTooLargeError + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchSizeTooLargeError.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.maxBatchSize = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.maxBatchSize = options.longs === String ? "0" : 0; + object.errorMessage = ""; + } + if (message.maxBatchSize != null && message.hasOwnProperty("maxBatchSize")) + if (typeof message.maxBatchSize === "number") + object.maxBatchSize = options.longs === String ? String(message.maxBatchSize) : message.maxBatchSize; + else + object.maxBatchSize = options.longs === String ? $util.Long.prototype.toString.call(message.maxBatchSize) : options.longs === Number ? new $util.LongBits(message.maxBatchSize.low >>> 0, message.maxBatchSize.high >>> 0).toNumber() : message.maxBatchSize; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + object.errorMessage = message.errorMessage; + return object; + }; + + /** + * Converts this BatchSizeTooLargeError to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @instance + * @returns {Object.} JSON object + */ + BatchSizeTooLargeError.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchSizeTooLargeError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchSizeTooLargeError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError"; + }; + + return BatchSizeTooLargeError; + })(); + + v1alpha.FieldSchema = (function() { + + /** + * Properties of a FieldSchema. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IFieldSchema + * @property {string|null} [name] FieldSchema name + * @property {string|null} [type] FieldSchema type + */ + + /** + * Constructs a new FieldSchema. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a FieldSchema. + * @implements IFieldSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IFieldSchema=} [properties] Properties to set + */ + function FieldSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldSchema name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @instance + */ + FieldSchema.prototype.name = ""; + + /** + * FieldSchema type. 
+ * @member {string} type + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @instance + */ + FieldSchema.prototype.type = ""; + + /** + * Creates a new FieldSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IFieldSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.FieldSchema} FieldSchema instance + */ + FieldSchema.create = function create(properties) { + return new FieldSchema(properties); + }; + + /** + * Encodes the specified FieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.FieldSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IFieldSchema} message FieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.type); + return writer; + }; + + /** + * Encodes the specified FieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.FieldSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IFieldSchema} message FieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.FieldSchema} FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldSchema.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.FieldSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.FieldSchema} FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + return null; + }; + + /** + * Creates a FieldSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.FieldSchema} FieldSchema + */ + FieldSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.FieldSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.FieldSchema(); + if (object.name != null) + message.name = String(object.name); + if (object.type != null) + message.type = String(object.type); + return message; + }; + + /** + * Creates a plain object from a FieldSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1alpha.FieldSchema} message FieldSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.type = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + return object; + }; + + /** + * Converts this FieldSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @instance + * @returns {Object.} JSON object + */ + FieldSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.FieldSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.FieldSchema"; + }; + + return FieldSchema; + })(); + + v1alpha.StorageDescriptor = (function() { + + /** + * Properties of a StorageDescriptor. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IStorageDescriptor + * @property {string|null} [locationUri] StorageDescriptor locationUri + * @property {string|null} [inputFormat] StorageDescriptor inputFormat + * @property {string|null} [outputFormat] StorageDescriptor outputFormat + * @property {google.cloud.bigquery.storage.v1alpha.ISerDeInfo|null} [serdeInfo] StorageDescriptor serdeInfo + */ + + /** + * Constructs a new StorageDescriptor. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a StorageDescriptor. + * @implements IStorageDescriptor + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor=} [properties] Properties to set + */ + function StorageDescriptor(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StorageDescriptor locationUri. 
+ * @member {string} locationUri + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.locationUri = ""; + + /** + * StorageDescriptor inputFormat. + * @member {string} inputFormat + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.inputFormat = ""; + + /** + * StorageDescriptor outputFormat. + * @member {string} outputFormat + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.outputFormat = ""; + + /** + * StorageDescriptor serdeInfo. + * @member {google.cloud.bigquery.storage.v1alpha.ISerDeInfo|null|undefined} serdeInfo + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.serdeInfo = null; + + /** + * Creates a new StorageDescriptor instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.StorageDescriptor} StorageDescriptor instance + */ + StorageDescriptor.create = function create(properties) { + return new StorageDescriptor(properties); + }; + + /** + * Encodes the specified StorageDescriptor message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StorageDescriptor.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor} message StorageDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageDescriptor.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.locationUri != null && Object.hasOwnProperty.call(message, "locationUri")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.locationUri); + if (message.inputFormat != null && Object.hasOwnProperty.call(message, "inputFormat")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputFormat); + if (message.outputFormat != null && Object.hasOwnProperty.call(message, "outputFormat")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputFormat); + if (message.serdeInfo != null && Object.hasOwnProperty.call(message, "serdeInfo")) + $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo.encode(message.serdeInfo, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StorageDescriptor message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StorageDescriptor.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor} message StorageDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageDescriptor.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.StorageDescriptor} StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageDescriptor.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.locationUri = reader.string(); + break; + } + case 2: { + message.inputFormat = reader.string(); + break; + } + case 3: { + message.outputFormat = reader.string(); + break; + } + case 4: { + message.serdeInfo = $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.StorageDescriptor} StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageDescriptor.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StorageDescriptor message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StorageDescriptor.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.locationUri != null && message.hasOwnProperty("locationUri")) + if (!$util.isString(message.locationUri)) + return "locationUri: string expected"; + if (message.inputFormat != null && message.hasOwnProperty("inputFormat")) + if (!$util.isString(message.inputFormat)) + return "inputFormat: string expected"; + if (message.outputFormat != null && message.hasOwnProperty("outputFormat")) + if (!$util.isString(message.outputFormat)) + return "outputFormat: string expected"; + if (message.serdeInfo != null && message.hasOwnProperty("serdeInfo")) { + var error = $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo.verify(message.serdeInfo); + if (error) + return "serdeInfo." + error; + } + return null; + }; + + /** + * Creates a StorageDescriptor message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.StorageDescriptor} StorageDescriptor + */ + StorageDescriptor.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor(); + if (object.locationUri != null) + message.locationUri = String(object.locationUri); + if (object.inputFormat != null) + message.inputFormat = String(object.inputFormat); + if (object.outputFormat != null) + message.outputFormat = String(object.outputFormat); + if (object.serdeInfo != null) { + if (typeof object.serdeInfo !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.StorageDescriptor.serdeInfo: object expected"); + message.serdeInfo = $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo.fromObject(object.serdeInfo); + } + return message; + }; + + /** + * Creates a plain object from a StorageDescriptor message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1alpha.StorageDescriptor} message StorageDescriptor + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StorageDescriptor.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.locationUri = ""; + object.inputFormat = ""; + object.outputFormat = ""; + object.serdeInfo = null; + } + if (message.locationUri != null && message.hasOwnProperty("locationUri")) + object.locationUri = message.locationUri; + if (message.inputFormat != null && message.hasOwnProperty("inputFormat")) + object.inputFormat = message.inputFormat; + if (message.outputFormat != null && message.hasOwnProperty("outputFormat")) + object.outputFormat = message.outputFormat; + if (message.serdeInfo != null && message.hasOwnProperty("serdeInfo")) + object.serdeInfo = $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo.toObject(message.serdeInfo, options); + return object; + }; + + /** + * Converts this StorageDescriptor to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @instance + * @returns {Object.} JSON object + */ + StorageDescriptor.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StorageDescriptor + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.StorageDescriptor + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StorageDescriptor.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.StorageDescriptor"; + }; + + return StorageDescriptor; + })(); + + v1alpha.SerDeInfo = (function() { + + /** + * Properties of a SerDeInfo. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface ISerDeInfo + * @property {string|null} [name] SerDeInfo name + * @property {string|null} [serializationLibrary] SerDeInfo serializationLibrary + * @property {Object.|null} [parameters] SerDeInfo parameters + */ + + /** + * Constructs a new SerDeInfo. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a SerDeInfo. + * @implements ISerDeInfo + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.ISerDeInfo=} [properties] Properties to set + */ + function SerDeInfo(properties) { + this.parameters = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SerDeInfo name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @instance + */ + SerDeInfo.prototype.name = ""; + + /** + * SerDeInfo serializationLibrary. 
+ * @member {string} serializationLibrary + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @instance + */ + SerDeInfo.prototype.serializationLibrary = ""; + + /** + * SerDeInfo parameters. + * @member {Object.} parameters + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @instance + */ + SerDeInfo.prototype.parameters = $util.emptyObject; + + /** + * Creates a new SerDeInfo instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ISerDeInfo=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.SerDeInfo} SerDeInfo instance + */ + SerDeInfo.create = function create(properties) { + return new SerDeInfo(properties); + }; + + /** + * Encodes the specified SerDeInfo message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.SerDeInfo.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ISerDeInfo} message SerDeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SerDeInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.serializationLibrary != null && Object.hasOwnProperty.call(message, "serializationLibrary")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.serializationLibrary); + if (message.parameters != null && Object.hasOwnProperty.call(message, "parameters")) + for (var keys = Object.keys(message.parameters), i = 0; i < keys.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, 
wireType 2 =*/18).string(message.parameters[keys[i]]).ldelim(); + return writer; + }; + + /** + * Encodes the specified SerDeInfo message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.SerDeInfo.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ISerDeInfo} message SerDeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SerDeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.SerDeInfo} SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SerDeInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.serializationLibrary = reader.string(); + break; + } + case 3: { + if (message.parameters === $util.emptyObject) + message.parameters = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.parameters[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.SerDeInfo} SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SerDeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SerDeInfo message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SerDeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.serializationLibrary != null && message.hasOwnProperty("serializationLibrary")) + if (!$util.isString(message.serializationLibrary)) + return "serializationLibrary: string expected"; + if (message.parameters != null && message.hasOwnProperty("parameters")) { + if (!$util.isObject(message.parameters)) + return "parameters: object expected"; + var key = Object.keys(message.parameters); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.parameters[key[i]])) + return "parameters: string{k:string} expected"; + } + return null; + }; + + /** + * Creates a SerDeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.SerDeInfo} SerDeInfo + */ + SerDeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo(); + if (object.name != null) + message.name = String(object.name); + if (object.serializationLibrary != null) + message.serializationLibrary = String(object.serializationLibrary); + if (object.parameters) { + if (typeof object.parameters !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.SerDeInfo.parameters: object expected"); + message.parameters = {}; + for (var keys = Object.keys(object.parameters), i = 0; i < keys.length; ++i) + message.parameters[keys[i]] = String(object.parameters[keys[i]]); + } + return message; + }; + + /** + * Creates a plain object from a SerDeInfo message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1alpha.SerDeInfo} message SerDeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SerDeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.parameters = {}; + if (options.defaults) { + object.name = ""; + object.serializationLibrary = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.serializationLibrary != null && message.hasOwnProperty("serializationLibrary")) + object.serializationLibrary = message.serializationLibrary; + var keys2; + if (message.parameters && (keys2 = Object.keys(message.parameters)).length) { + object.parameters = {}; + for (var j = 0; j < keys2.length; ++j) + object.parameters[keys2[j]] = message.parameters[keys2[j]]; + } + return object; + }; + + /** + * Converts this SerDeInfo to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @instance + * @returns {Object.} JSON object + */ + SerDeInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SerDeInfo + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.SerDeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SerDeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.SerDeInfo"; + }; + + return SerDeInfo; + })(); + + v1alpha.MetastorePartition = (function() { + + /** + * Properties of a MetastorePartition. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IMetastorePartition + * @property {Array.|null} [values] MetastorePartition values + * @property {google.protobuf.ITimestamp|null} [createTime] MetastorePartition createTime + * @property {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor|null} [storageDescriptor] MetastorePartition storageDescriptor + * @property {Object.|null} [parameters] MetastorePartition parameters + * @property {Array.|null} [fields] MetastorePartition fields + */ + + /** + * Constructs a new MetastorePartition. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a MetastorePartition. 
+ * @implements IMetastorePartition + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartition=} [properties] Properties to set + */ + function MetastorePartition(properties) { + this.values = []; + this.parameters = {}; + this.fields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartition values. + * @member {Array.} values + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + */ + MetastorePartition.prototype.values = $util.emptyArray; + + /** + * MetastorePartition createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + */ + MetastorePartition.prototype.createTime = null; + + /** + * MetastorePartition storageDescriptor. + * @member {google.cloud.bigquery.storage.v1alpha.IStorageDescriptor|null|undefined} storageDescriptor + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + */ + MetastorePartition.prototype.storageDescriptor = null; + + /** + * MetastorePartition parameters. + * @member {Object.} parameters + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + */ + MetastorePartition.prototype.parameters = $util.emptyObject; + + /** + * MetastorePartition fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + */ + MetastorePartition.prototype.fields = $util.emptyArray; + + /** + * Creates a new MetastorePartition instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartition=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartition} MetastorePartition instance + */ + MetastorePartition.create = function create(properties) { + return new MetastorePartition(properties); + }; + + /** + * Encodes the specified MetastorePartition message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartition} message MetastorePartition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartition.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.values[i]); + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.storageDescriptor != null && Object.hasOwnProperty.call(message, "storageDescriptor")) + $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor.encode(message.storageDescriptor, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.parameters != null && Object.hasOwnProperty.call(message, "parameters")) + for (var keys = Object.keys(message.parameters), i = 0; i < keys.length; ++i) + writer.uint32(/* id 4, wireType 2 =*/34).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 
=*/18).string(message.parameters[keys[i]]).ldelim(); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.FieldSchema.encode(message.fields[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MetastorePartition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartition} message MetastorePartition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartition.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartition} MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartition.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push(reader.string()); + break; + } + case 2: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 3: { + message.storageDescriptor = $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor.decode(reader, reader.uint32()); + break; + } + case 4: { + if (message.parameters === $util.emptyObject) + message.parameters = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.parameters[key] = value; + break; + } + case 5: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1alpha.FieldSchema.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartition} MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartition.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartition message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartition.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) + if (!$util.isString(message.values[i])) + return "values: string[] expected"; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.createTime); + if (error) + return "createTime." + error; + } + if (message.storageDescriptor != null && message.hasOwnProperty("storageDescriptor")) { + var error = $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor.verify(message.storageDescriptor); + if (error) + return "storageDescriptor." 
+ error; + } + if (message.parameters != null && message.hasOwnProperty("parameters")) { + if (!$util.isObject(message.parameters)) + return "parameters: object expected"; + var key = Object.keys(message.parameters); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.parameters[key[i]])) + return "parameters: string{k:string} expected"; + } + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.FieldSchema.verify(message.fields[i]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a MetastorePartition message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartition} MetastorePartition + */ + MetastorePartition.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) + message.values[i] = String(object.values[i]); + } + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.storageDescriptor != null) { + if (typeof object.storageDescriptor !== 
"object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.storageDescriptor: object expected"); + message.storageDescriptor = $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor.fromObject(object.storageDescriptor); + } + if (object.parameters) { + if (typeof object.parameters !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.parameters: object expected"); + message.parameters = {}; + for (var keys = Object.keys(object.parameters), i = 0; i < keys.length; ++i) + message.parameters[keys[i]] = String(object.parameters[keys[i]]); + } + if (object.fields) { + if (!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartition.fields: object expected"); + message.fields[i] = $root.google.cloud.bigquery.storage.v1alpha.FieldSchema.fromObject(object.fields[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartition message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartition} message MetastorePartition + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartition.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.values = []; + object.fields = []; + } + if (options.objects || options.defaults) + object.parameters = {}; + if (options.defaults) { + object.createTime = null; + object.storageDescriptor = null; + } + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = message.values[j]; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.storageDescriptor != null && message.hasOwnProperty("storageDescriptor")) + object.storageDescriptor = $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor.toObject(message.storageDescriptor, options); + var keys2; + if (message.parameters && (keys2 = Object.keys(message.parameters)).length) { + object.parameters = {}; + for (var j = 0; j < keys2.length; ++j) + object.parameters[keys2[j]] = message.parameters[keys2[j]]; + } + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1alpha.FieldSchema.toObject(message.fields[j], options); + } + return object; + }; + + /** + * Converts this MetastorePartition to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @instance + * @returns {Object.} JSON object + */ + MetastorePartition.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartition + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartition + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartition.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.MetastorePartition"; + }; + + return MetastorePartition; + })(); + + v1alpha.MetastorePartitionList = (function() { + + /** + * Properties of a MetastorePartitionList. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IMetastorePartitionList + * @property {Array.|null} [partitions] MetastorePartitionList partitions + */ + + /** + * Constructs a new MetastorePartitionList. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a MetastorePartitionList. + * @implements IMetastorePartitionList + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList=} [properties] Properties to set + */ + function MetastorePartitionList(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartitionList partitions. 
+ * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @instance + */ + MetastorePartitionList.prototype.partitions = $util.emptyArray; + + /** + * Creates a new MetastorePartitionList instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionList} MetastorePartitionList instance + */ + MetastorePartitionList.create = function create(properties) { + return new MetastorePartitionList(properties); + }; + + /** + * Encodes the specified MetastorePartitionList message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList} message MetastorePartitionList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionList.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MetastorePartitionList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionList} message MetastorePartitionList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionList.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionList} MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionList.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionList} MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionList.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartitionList message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartitionList.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a MetastorePartitionList message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionList} MetastorePartitionList + */ + MetastorePartitionList.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartitionList message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionList} message MetastorePartitionList + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartitionList.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this MetastorePartitionList to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @instance + * @returns {Object.} JSON object + */ + MetastorePartitionList.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartitionList + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionList + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartitionList.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.MetastorePartitionList"; + }; + + return MetastorePartitionList; + })(); + + v1alpha.ReadStream = (function() { + + /** + * Properties of a ReadStream. 
+ * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IReadStream + * @property {string|null} [name] ReadStream name + */ + + /** + * Constructs a new ReadStream. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a ReadStream. + * @implements IReadStream + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IReadStream=} [properties] Properties to set + */ + function ReadStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @instance + */ + ReadStream.prototype.name = ""; + + /** + * Creates a new ReadStream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IReadStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.ReadStream} ReadStream instance + */ + ReadStream.create = function create(properties) { + return new ReadStream(properties); + }; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ReadStream.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.ReadStream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ReadStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadStream message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.ReadStream} ReadStream + */ + ReadStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.ReadStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.ReadStream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1alpha.ReadStream} message ReadStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this ReadStream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @instance + * @returns {Object.} JSON object + */ + ReadStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.ReadStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.ReadStream"; + }; + + return ReadStream; + })(); + + v1alpha.StreamList = (function() { + + /** + * Properties of a StreamList. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IStreamList + * @property {Array.|null} [streams] StreamList streams + */ + + /** + * Constructs a new StreamList. + * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a StreamList. + * @implements IStreamList + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IStreamList=} [properties] Properties to set + */ + function StreamList(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamList streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @instance + */ + StreamList.prototype.streams = $util.emptyArray; + + /** + * Creates a new StreamList instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamList=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.StreamList} StreamList instance + */ + StreamList.create = function create(properties) { + return new StreamList(properties); + }; + + /** + * Encodes the specified StreamList message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamList.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamList} message StreamList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamList.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1alpha.ReadStream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.StreamList.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IStreamList} message StreamList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamList.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamList message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.StreamList} StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamList.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamList(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1alpha.ReadStream.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamList message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.StreamList} StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamList.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamList message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamList.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1alpha.ReadStream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a StreamList message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.StreamList} StreamList + */ + StreamList.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.StreamList) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.StreamList(); + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.StreamList.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1alpha.StreamList.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1alpha.ReadStream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a StreamList message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1alpha.StreamList} message StreamList + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamList.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1alpha.ReadStream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this StreamList to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @instance + * @returns {Object.} JSON object + */ + StreamList.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamList + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.StreamList + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamList.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.StreamList"; + }; + + return StreamList; + })(); + + v1alpha.MetastorePartitionValues = (function() { + + /** + * Properties of a MetastorePartitionValues. + * @memberof google.cloud.bigquery.storage.v1alpha + * @interface IMetastorePartitionValues + * @property {Array.|null} [values] MetastorePartitionValues values + */ + + /** + * Constructs a new MetastorePartitionValues. 
+ * @memberof google.cloud.bigquery.storage.v1alpha + * @classdesc Represents a MetastorePartitionValues. + * @implements IMetastorePartitionValues + * @constructor + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues=} [properties] Properties to set + */ + function MetastorePartitionValues(properties) { + this.values = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartitionValues values. + * @member {Array.} values + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @instance + */ + MetastorePartitionValues.prototype.values = $util.emptyArray; + + /** + * Creates a new MetastorePartitionValues instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues} MetastorePartitionValues instance + */ + MetastorePartitionValues.create = function create(properties) { + return new MetastorePartitionValues(properties); + }; + + /** + * Encodes the specified MetastorePartitionValues message. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues} message MetastorePartitionValues message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionValues.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.values[i]); + return writer; + }; + + /** + * Encodes the specified MetastorePartitionValues message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues} message MetastorePartitionValues message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionValues.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues} MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionValues.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues} MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionValues.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartitionValues message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartitionValues.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) + if (!$util.isString(message.values[i])) + return "values: string[] expected"; + } + return null; + }; + + /** + * Creates a MetastorePartitionValues message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues} MetastorePartitionValues + */ + MetastorePartitionValues.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues) + return object; + var message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) + message.values[i] = String(object.values[i]); + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartitionValues message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues} message MetastorePartitionValues + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartitionValues.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = message.values[j]; + } + return object; + }; + + /** + * Converts this MetastorePartitionValues to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @instance + * @returns {Object.} JSON object + */ + MetastorePartitionValues.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartitionValues + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartitionValues.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues"; + }; + + return MetastorePartitionValues; + })(); + + return v1alpha; + })(); + storage.v1beta1 = (function() { /** @@ -31703,6 +36899,225 @@ return Empty; })(); + protobuf.FieldMask = (function() { + + /** + * Properties of a FieldMask. 
+ * @memberof google.protobuf + * @interface IFieldMask + * @property {Array.|null} [paths] FieldMask paths + */ + + /** + * Constructs a new FieldMask. + * @memberof google.protobuf + * @classdesc Represents a FieldMask. + * @implements IFieldMask + * @constructor + * @param {google.protobuf.IFieldMask=} [properties] Properties to set + */ + function FieldMask(properties) { + this.paths = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldMask paths. + * @member {Array.} paths + * @memberof google.protobuf.FieldMask + * @instance + */ + FieldMask.prototype.paths = $util.emptyArray; + + /** + * Creates a new FieldMask instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldMask + * @static + * @param {google.protobuf.IFieldMask=} [properties] Properties to set + * @returns {google.protobuf.FieldMask} FieldMask instance + */ + FieldMask.create = function create(properties) { + return new FieldMask(properties); + }; + + /** + * Encodes the specified FieldMask message. Does not implicitly {@link google.protobuf.FieldMask.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldMask + * @static + * @param {google.protobuf.IFieldMask} message FieldMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldMask.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.paths != null && message.paths.length) + for (var i = 0; i < message.paths.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.paths[i]); + return writer; + }; + + /** + * Encodes the specified FieldMask message, length delimited. Does not implicitly {@link google.protobuf.FieldMask.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.FieldMask + * @static + * @param {google.protobuf.IFieldMask} message FieldMask message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldMask.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldMask message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldMask} FieldMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldMask.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldMask(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.paths && message.paths.length)) + message.paths = []; + message.paths.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldMask message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FieldMask + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldMask} FieldMask + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldMask.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldMask message. + * @function verify + * @memberof google.protobuf.FieldMask + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldMask.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.paths != null && message.hasOwnProperty("paths")) { + if (!Array.isArray(message.paths)) + return "paths: array expected"; + for (var i = 0; i < message.paths.length; ++i) + if (!$util.isString(message.paths[i])) + return "paths: string[] expected"; + } + return null; + }; + + /** + * Creates a FieldMask message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FieldMask + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldMask} FieldMask + */ + FieldMask.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldMask) + return object; + var message = new $root.google.protobuf.FieldMask(); + if (object.paths) { + if (!Array.isArray(object.paths)) + throw TypeError(".google.protobuf.FieldMask.paths: array expected"); + message.paths = []; + for (var i = 0; i < object.paths.length; ++i) + message.paths[i] = String(object.paths[i]); + } + return message; + }; + + /** + * Creates a plain object from a FieldMask message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldMask + * @static + * @param {google.protobuf.FieldMask} message FieldMask + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldMask.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.paths = []; + if (message.paths && message.paths.length) { + object.paths = []; + for (var j = 0; j < message.paths.length; ++j) + object.paths[j] = message.paths[j]; + } + return object; + }; + + /** + * Converts this FieldMask to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FieldMask + * @instance + * @returns {Object.} JSON object + */ + FieldMask.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldMask + * @function getTypeUrl + * @memberof google.protobuf.FieldMask + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldMask.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldMask"; + }; + + return FieldMask; + })(); + return protobuf; })(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index e5e3e0295c4..eaafe219649 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1177,6 +1177,486 @@ } } }, + "v1alpha": { + "options": { + "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1Alpha", + "go_package": "cloud.google.com/go/bigquery/storage/apiv1alpha/storagepb;storagepb", + "java_multiple_files": true, + "java_outer_classname": "MetastorePartitionProto", + "java_package": "com.google.cloud.bigquery.storage.v1alpha", + "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1alpha", + "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", + "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" + }, + "nested": { + "MetastorePartitionService": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "BatchCreateMetastorePartitions": { + "requestType": "BatchCreateMetastorePartitionsRequest", + 
"responseType": "BatchCreateMetastorePartitionsResponse", + "options": { + "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate", + "body": "*" + } + } + ] + }, + "BatchDeleteMetastorePartitions": { + "requestType": "BatchDeleteMetastorePartitionsRequest", + "responseType": "google.protobuf.Empty", + "options": { + "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete", + "body": "*" + } + } + ] + }, + "BatchUpdateMetastorePartitions": { + "requestType": "BatchUpdateMetastorePartitionsRequest", + "responseType": "BatchUpdateMetastorePartitionsResponse", + "options": { + "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate", + "body": "*" + } + } + ] + }, + "ListMetastorePartitions": { + "requestType": "ListMetastorePartitionsRequest", + "responseType": "ListMetastorePartitionsResponse", + "options": { + "(google.api.http).get": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list", + "(google.api.method_signature)": "parent" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list" + } + }, + { + "(google.api.method_signature)": "parent" + } + ] + }, + "StreamMetastorePartitions": { + "requestType": "StreamMetastorePartitionsRequest", + 
"requestStream": true, + "responseType": "StreamMetastorePartitionsResponse", + "responseStream": true + } + } + }, + "CreateMetastorePartitionRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "metastorePartition": { + "type": "MetastorePartition", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCreateMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "requests": { + "rule": "repeated", + "type": "CreateMetastorePartitionRequest", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "skipExistingPartitions": { + "type": "bool", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "BatchCreateMetastorePartitionsResponse": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1 + } + } + }, + "BatchDeleteMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "partitionValues": { + "rule": "repeated", + "type": "MetastorePartitionValues", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "UpdateMetastorePartitionRequest": { + "fields": { + "metastorePartition": { + "type": "MetastorePartition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "updateMask": { + "type": "google.protobuf.FieldMask", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "BatchUpdateMetastorePartitionsRequest": { + 
"fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "requests": { + "rule": "repeated", + "type": "UpdateMetastorePartitionRequest", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchUpdateMetastorePartitionsResponse": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1 + } + } + }, + "ListMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "filter": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "ListMetastorePartitionsResponse": { + "oneofs": { + "response": { + "oneof": [ + "partitions", + "streams" + ] + } + }, + "fields": { + "partitions": { + "type": "MetastorePartitionList", + "id": 1 + }, + "streams": { + "type": "StreamList", + "id": 2 + } + } + }, + "StreamMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "metastorePartitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "skipExistingPartitions": { + "type": "bool", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "StreamMetastorePartitionsResponse": { + "fields": { + "totalPartitionsStreamedCount": { + "type": "int64", + "id": 2 + }, + "totalPartitionsInsertedCount": { + "type": "int64", + "id": 3 + } + } + }, + "BatchSizeTooLargeError": { + "fields": { + "maxBatchSize": { + "type": "int64", + "id": 1 + }, + 
"errorMessage": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "FieldSchema": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "type": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "StorageDescriptor": { + "fields": { + "locationUri": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "inputFormat": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "outputFormat": { + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "serdeInfo": { + "type": "SerDeInfo", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "SerDeInfo": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "serializationLibrary": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "parameters": { + "keyType": "string", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "MetastorePartition": { + "fields": { + "values": { + "rule": "repeated", + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "createTime": { + "type": "google.protobuf.Timestamp", + "id": 2, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "storageDescriptor": { + "type": "StorageDescriptor", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "parameters": { + "keyType": "string", + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "fields": { + "rule": "repeated", + "type": "FieldSchema", + "id": 5, + "options": 
{ + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "MetastorePartitionList": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "ReadStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}", + "(google.api.resource).plural": "readStreams", + "(google.api.resource).singular": "readStream" + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "IDENTIFIER" + } + } + } + }, + "StreamList": { + "fields": { + "streams": { + "rule": "repeated", + "type": "ReadStream", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + } + }, + "MetastorePartitionValues": { + "fields": { + "values": { + "rule": "repeated", + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + } + } + }, "v1beta1": { "options": { "go_package": "cloud.google.com/go/bigquery/storage/apiv1beta1/storagepb;storagepb", @@ -3071,6 +3551,15 @@ }, "Empty": { "fields": {} + }, + "FieldMask": { + "fields": { + "paths": { + "rule": "repeated", + "type": "string", + "id": 1 + } + } } } }, diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js new file mode 100644 index 00000000000..05500209f3c --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js @@ -0,0 +1,76 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, requests) { + // [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to where the metastore partitions to be + * added, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. Requests to add metastore partitions to the table. + */ + // const requests = [1,2,3,4] + /** + * Optional. Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS if any partition already exists. If the flag is set to true, + * the server will skip existing partitions and insert only the non-existing + * partitions. 
+ */ + // const skipExistingPartitions = true + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchCreateMetastorePartitions() { + // Construct request + const request = { + parent, + requests, + }; + + // Run request + const response = await storageClient.batchCreateMetastorePartitions(request); + console.log(response); + } + + callBatchCreateMetastorePartitions(); + // [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js new file mode 100644 index 00000000000..d991e64e34c --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js @@ -0,0 +1,69 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, partitionValues) { + // [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. The list of metastore partitions (identified by its values) to be + * deleted. A maximum of 100 partitions can be deleted in a batch. + */ + // const partitionValues = [1,2,3,4] + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchDeleteMetastorePartitions() { + // Construct request + const request = { + parent, + partitionValues, + }; + + // Run request + const response = await storageClient.batchDeleteMetastorePartitions(request); + console.log(response); + } + + callBatchDeleteMetastorePartitions(); + // [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js 
b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js new file mode 100644 index 00000000000..51c097328e2 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js @@ -0,0 +1,68 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, requests) { + // [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. Requests to update metastore partitions in the table. 
+ */ + // const requests = [1,2,3,4] + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchUpdateMetastorePartitions() { + // Construct request + const request = { + parent, + requests, + }; + + // Run request + const response = await storageClient.batchUpdateMetastorePartitions(request); + console.log(response); + } + + callBatchUpdateMetastorePartitions(); + // [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js new file mode 100644 index 00000000000..fd515972b8c --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js @@ -0,0 +1,75 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Optional. SQL text filtering statement, similar to a WHERE clause in a + * query. Only supports single-row expressions. Aggregate functions are not + * supported. + * Examples: "int_field > 5" + * "date_field = CAST('2014-9-27' as DATE)" + * "nullable_field is not NULL" + * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + * "numeric_field BETWEEN 1.0 AND 5.0" + * Restricted to a maximum length for 1 MB. 
+ */ + // const filter = 'abc123' + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callListMetastorePartitions() { + // Construct request + const request = { + parent, + }; + + // Run request + const response = await storageClient.listMetastorePartitions(request); + console.log(response); + } + + callListMetastorePartitions(); + // [END bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js new file mode 100644 index 00000000000..995cfb76097 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js @@ -0,0 +1,82 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(parent) { + // [START bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to where the partition to be added, in the + * format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Optional. A list of metastore partitions to be added to the table. + */ + // const metastorePartitions = [1,2,3,4] + /** + * Optional. Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS on commit if any partition already exists. If the flag is + * set to true: + * 1) the server will skip existing partitions + * insert only the non-existing partitions as part of the commit. + * 2) The client must set the `skip_existing_partitions` field to true for + * all requests in the stream. 
+ */ + // const skipExistingPartitions = true + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callStreamMetastorePartitions() { + // Construct request + const request = { + parent, + }; + + // Run request + const stream = await storageClient.streamMetastorePartitions(); + stream.on('data', (response) => { console.log(response) }); + stream.on('error', (err) => { throw(err) }); + stream.on('end', () => { /* API call completed */ }); + stream.write(request); + stream.end(); + } + + callStreamMetastorePartitions(); + // [END bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json new file mode 100644 index 00000000000..c9a4a2c4fbf --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -0,0 +1,243 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "4.9.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1alpha", + "version": "v1alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", + "title": "MetastorePartitionService batchCreateMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Adds metastore partitions to a table.", + "canonical": true, + "file": "metastore_partition_service.batch_create_metastore_partitions.js", 
+ "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 68, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCreateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "requests", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "skip_existing_partitions", + "type": "TYPE_BOOL" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchCreateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", + "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Deletes metastore partitions from a table.", + "canonical": true, + "file": "metastore_partition_service.batch_delete_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchDeleteMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "partition_values", + "type": "TYPE_MESSAGE[]" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": 
"MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchDeleteMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", + "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Updates metastore partitions in a table.", + "canonical": true, + "file": "metastore_partition_service.batch_update_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 60, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchUpdateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "requests", + "type": "TYPE_MESSAGE[]" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchUpdateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async", + "title": 
"MetastorePartitionService listMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Gets metastore partitions from a table.", + "canonical": true, + "file": "metastore_partition_service.list_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 67, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "ListMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async", + "title": "MetastorePartitionService streamMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. 
This method is only available via the gRPC API (not REST).", + "canonical": true, + "file": "metastore_partition_service.stream_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 74, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StreamMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "metastore_partitions", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "skip_existing_partitions", + "type": "TYPE_BOOL" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "StreamMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService" + } + } + } + } + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 8733d044438..7f6d42fdaa5 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -18,6 +18,7 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; +import * as v1alpha from './v1alpha'; import * as managedwriter from './managedwriter'; const BigQueryReadClient = v1.BigQueryReadClient; type BigQueryReadClient = v1.BigQueryReadClient; @@ -31,6 +32,7 @@ export { v1, BigQueryReadClient, v1beta1, + v1alpha, BigQueryStorageClient, BigQueryWriteClient, managedwriter, diff --git a/handwritten/bigquery-storage/src/v1alpha/gapic_metadata.json 
b/handwritten/bigquery-storage/src/v1alpha/gapic_metadata.json new file mode 100644 index 00000000000..5ff06498148 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1alpha/gapic_metadata.json @@ -0,0 +1,68 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1alpha", + "libraryPackage": "@google-cloud/storage", + "services": { + "MetastorePartitionService": { + "clients": { + "grpc": { + "libraryClient": "MetastorePartitionServiceClient", + "rpcs": { + "BatchCreateMetastorePartitions": { + "methods": [ + "batchCreateMetastorePartitions" + ] + }, + "BatchDeleteMetastorePartitions": { + "methods": [ + "batchDeleteMetastorePartitions" + ] + }, + "BatchUpdateMetastorePartitions": { + "methods": [ + "batchUpdateMetastorePartitions" + ] + }, + "ListMetastorePartitions": { + "methods": [ + "listMetastorePartitions" + ] + }, + "StreamMetastorePartitions": { + "methods": [ + "streamMetastorePartitions" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MetastorePartitionServiceClient", + "rpcs": { + "BatchCreateMetastorePartitions": { + "methods": [ + "batchCreateMetastorePartitions" + ] + }, + "BatchDeleteMetastorePartitions": { + "methods": [ + "batchDeleteMetastorePartitions" + ] + }, + "BatchUpdateMetastorePartitions": { + "methods": [ + "batchUpdateMetastorePartitions" + ] + }, + "ListMetastorePartitions": { + "methods": [ + "listMetastorePartitions" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1alpha/index.ts b/handwritten/bigquery-storage/src/v1alpha/index.ts new file mode 100644 index 00000000000..1f399cceb88 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MetastorePartitionServiceClient} from './metastore_partition_service_client'; diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts new file mode 100644 index 00000000000..ac5361efb89 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts @@ -0,0 +1,1002 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +/* global window */ +import type * as gax from 'google-gax'; +import type { + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import {PassThrough} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); + +/** + * Client JSON configuration object, loaded from + * `src/v1alpha/metastore_partition_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './metastore_partition_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * BigQuery Metastore Partition Service API. + * This service is used for managing metastore partitions in BigQuery metastore. + * The service supports only batch operations for write. + * @class + * @memberof v1alpha + */ +export class MetastorePartitionServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + private _universeDomain: string; + private _servicePath: string; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + metastorePartitionServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MetastorePartitionServiceClient. + * + * @param {object} [options] - The configuration object. 
+ * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. 
Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MetastorePartitionServiceClient({fallback: true}, gax); + * ``` + */ + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { + // Ensure that options include all the required fields. + const staticMembers = this + .constructor as typeof MetastorePartitionServiceClient; + if ( + opts?.universe_domain && + opts?.universeDomain && + opts?.universe_domain !== opts?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.' + ); + } + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + this._universeDomain = + opts?.universeDomain ?? + opts?.universe_domain ?? + universeDomainEnvVar ?? + 'googleapis.com'; + this._servicePath = 'bigquerystorage.' + this._universeDomain; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || this._servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // Request numeric enum values if REST transport is used. + opts.numericEnums = true; + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== this._servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = this._servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === this._servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process === 'object' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. 
+ this.pathTemplates = { + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}' + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + streamMetastorePartitions: new this._gaxModule.StreamDescriptor( + this._gaxModule.StreamType.BIDI_STREAMING, + !!opts.fallback, + !!opts.gaxServerStreamingRetries + ), + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. 
+ if (this.metastorePartitionServiceStub) { + return this.metastorePartitionServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1alpha.MetastorePartitionService. + this.metastorePartitionServiceStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1alpha + .MetastorePartitionService, + this._opts, + this._providedCustomServicePath + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const metastorePartitionServiceStubMethods = [ + 'batchCreateMetastorePartitions', + 'batchDeleteMetastorePartitions', + 'batchUpdateMetastorePartitions', + 'listMetastorePartitions', + 'streamMetastorePartitions', + ]; + for (const methodName of metastorePartitionServiceStubMethods) { + const callPromise = this.metastorePartitionServiceStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough(); + setImmediate(() => { + stream.emit( + 'error', + new this._gaxModule.GoogleError( + 'The client has already been closed.' 
+ ) + ); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + } + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.metastorePartitionServiceStub; + } + + /** + * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static servicePath is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static apiEndpoint is deprecated, please use the instance method instead.', + 'DeprecationWarning' + ); + } + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + + get universeDomain() { + return this._universeDomain; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. 
+ * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + /** + * Adds metastore partitions to a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to where the metastore partitions to be + * added, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.requests + * Required. Requests to add metastore partitions to the table. + * @param {boolean} [request.skipExistingPartitions] + * Optional. Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS if any partition already exists. If the flag is set to true, + * the server will skip existing partitions and insert only the non-existing + * partitions. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse|BatchCreateMetastorePartitionsResponse}. 
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example include:samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js + * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async + */ + batchCreateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchCreateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCreateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchCreateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | 
undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.batchCreateMetastorePartitions( + request, + options, + callback + ); + } + /** + * Deletes metastore partitions from a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.partitionValues + * Required. The list of metastore partitions (identified by its values) to be + * deleted. A maximum of 100 partitions can be deleted in a batch. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing {@link protos.google.protobuf.Empty|Empty}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example include:samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js + * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async + */ + batchDeleteMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchDeleteMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchDeleteMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchDeleteMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.IEmpty, + | 
protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.batchDeleteMetastorePartitions( + request, + options, + callback + ); + } + /** + * Updates metastore partitions in a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.requests + * Required. Requests to update metastore partitions in the table. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse|BatchUpdateMetastorePartitionsResponse}. 
+ * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example include:samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js + * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async + */ + batchUpdateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchUpdateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchUpdateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + batchUpdateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | 
undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.batchUpdateMetastorePartitions( + request, + options, + callback + ); + } + /** + * Gets metastore partitions from a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {string} [request.filter] + * Optional. SQL text filtering statement, similar to a WHERE clause in a + * query. Only supports single-row expressions. Aggregate functions are not + * supported. + * + * Examples: "int_field > 5" + * "date_field = CAST('2014-9-27' as DATE)" + * "nullable_field is not NULL" + * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + * "numeric_field BETWEEN 1.0 AND 5.0" + * Restricted to a maximum length for 1 MB. 
+ * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse|ListMetastorePartitionsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example include:samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js + * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async + */ + listMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, + options?: CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + listMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + listMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + listMetastorePartitions( + request?: 
protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize(); + return this.innerApiCalls.listMetastorePartitions( + request, + options, + callback + ); + } + + /** + * This is a bi-di streaming rpc method that allows the client to send + * a stream of partitions and commit all of them atomically at the end. + * If the commit is successful, the server will return a + * response and close the stream. If the commit fails (due to duplicate + * partitions or other reason), the server will close the stream with an + * error. This method is only available via the gRPC API (not REST). + * + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest|StreamMetastorePartitionsRequest} for write() method, and + * will emit objects representing {@link protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse|StreamMetastorePartitionsResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. + * @example include:samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js + * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async + */ + streamMetastorePartitions(options?: CallOptions): gax.CancellableStream { + this.initialize(); + return this.innerApiCalls.streamMetastorePartitions(null, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. + */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string + ) { + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. + */ + matchSessionFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. + */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.metastorePartitionServiceStub && !this._terminated) { + return this.metastorePartitionServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client_config.json b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client_config.json new file mode 100644 index 00000000000..2c57cdac831 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client_config.json @@ -0,0 +1,54 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + 
"initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "BatchCreateMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchDeleteMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchUpdateMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "ListMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "StreamMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_proto_list.json b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_proto_list.json new file mode 100644 index 00000000000..6c966cebce6 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_proto_list.json @@ -0,0 +1,4 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto", + "../../protos/google/cloud/bigquery/storage/v1alpha/partition.proto" +] diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts new file mode 100644 index 00000000000..925501688bd --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts @@ -0,0 +1,1131 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as metastorepartitionserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json') +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? 
sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubBidiStreamingCall( + response?: ResponseType, + error?: Error +) { + const transformStub = error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + return sinon.stub().returns(mockStream); +} + +describe('v1alpha.MetastorePartitionServiceClient', () => { + describe('Common methods', () => { + it('has apiEndpoint', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(); + const apiEndpoint = client.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + }); + + it('has universeDomain', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(); + const universeDomain = client.universeDomain; + assert.strictEqual(universeDomain, 'googleapis.com'); + }); + + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + it('throws DeprecationWarning if static servicePath is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const servicePath = + metastorepartitionserviceModule.v1alpha + .MetastorePartitionServiceClient.servicePath; + assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + + it('throws DeprecationWarning if static apiEndpoint is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const apiEndpoint = + metastorepartitionserviceModule.v1alpha + .MetastorePartitionServiceClient.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + } 
+ it('sets apiEndpoint according to universe domain camelCase', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + {universeDomain: 'example.com'} + ); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + it('sets apiEndpoint according to universe domain snakeCase', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + {universe_domain: 'example.com'} + ); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + if (typeof process === 'object' && 'env' in process) { + describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => { + it('sets apiEndpoint from environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + + it('value configured in code has priority over environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + {universeDomain: 'configured.example.com'} + ); + const servicePath = client.apiEndpoint; + assert.strictEqual( + servicePath, + 'bigquerystorage.configured.example.com' + ); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + }); + } + it('does not allow setting both universeDomain and 
universe_domain', () => { + assert.throws(() => { + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + {universe_domain: 'example.com', universeDomain: 'example.net'} + ); + }); + }); + + it('has port', () => { + const port = + metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient + .port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + fallback: true, + } + ); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + assert.strictEqual(client.metastorePartitionServiceStub, undefined); + await client.initialize(); + assert(client.metastorePartitionServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + assert(client.metastorePartitionServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + assert.strictEqual(client.metastorePartitionServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has 
getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('batchCreateMetastorePartitions', () => { + it('invokes batchCreateMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse() + ); + client.innerApiCalls.batchCreateMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchCreateMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse() + ); + client.innerApiCalls.batchCreateMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchCreateMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const 
response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchCreateMetastorePartitions = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchCreateMetastorePartitions(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions with closed client', async () => { + const client = + new 
metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchCreateMetastorePartitions(request), + expectedError + ); + }); + }); + + describe('batchDeleteMetastorePartitions', () => { + it('invokes batchDeleteMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.batchDeleteMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchDeleteMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + 
client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.batchDeleteMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchDeleteMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions with error', async () => { + const client = + new 
metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchDeleteMetastorePartitions = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchDeleteMetastorePartitions(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + 
client.batchDeleteMetastorePartitions(request), + expectedError + ); + }); + }); + + describe('batchUpdateMetastorePartitions', () => { + it('invokes batchUpdateMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse() + ); + client.innerApiCalls.batchUpdateMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchUpdateMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse() + ); + client.innerApiCalls.batchUpdateMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchUpdateMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', + ['parent'] + ); + 
request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchUpdateMetastorePartitions = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.batchUpdateMetastorePartitions(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.batchUpdateMetastorePartitions(request), + expectedError + ); + }); + }); + + describe('listMetastorePartitions', () => { + it('invokes listMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse() + ); + client.innerApiCalls.listMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.listMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse() + ); + client.innerApiCalls.listMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new 
Promise((resolve, reject) => { + client.listMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedError = new Error('expected'); + client.innerApiCalls.listMetastorePartitions = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects( + client.listMetastorePartitions(request), + expectedError + ); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + 
assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', + ['parent'] + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects( + client.listMetastorePartitions(request), + expectedError + ); + }); + }); + + describe('streamMetastorePartitions', () => { + it('invokes streamMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest() + ); + + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse() + ); + client.innerApiCalls.streamMetastorePartitions = + stubBidiStreamingCall(expectedResponse); + const stream = client.streamMetastorePartitions(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.streamMetastorePartitions as SinonStub) + .getCall(0) + .calledWith(null) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + + it('invokes streamMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest() + ); + const expectedError = new Error('expected'); + client.innerApiCalls.streamMetastorePartitions = stubBidiStreamingCall( + undefined, + expectedError + ); + const stream = client.streamMetastorePartitions(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + ) => { + resolve(response); + } + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + await assert.rejects(promise, expectedError); + assert( + (client.innerApiCalls.streamMetastorePartitions as SinonStub) + .getCall(0) + .calledWith(null) + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request + ); + }); + }); + + describe('Path templates', () => { + describe('readStream', () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + 
client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + + describe('table', () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = + new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( + { + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); + }); +}); From ba88fbe5339dcfa0919f22df50f66fdea84b854e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 11:09:25 -0400 Subject: [PATCH 290/333] chore: update issue templates and codeowners (#480) chore: update issue templates and codeowners Source-Link: https://github.com/googleapis/synthtool/commit/bf182cd41d9a7de56092cafcc7befe6b398332f6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:a5af6af827a9fffba373151e1453b0498da288024cdd16477900dd42857a42e0 Co-authored-by: Owl 
Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../bigquery-storage/.github/CODEOWNERS | 7 +- .../.github/ISSUE_TEMPLATE/bug_report.yml | 99 +++++++++++++++++++ .../ISSUE_TEMPLATE/documentation_request.yml | 53 ++++++++++ .../ISSUE_TEMPLATE/feature_request.yml | 53 ++++++++++ .../ISSUE_TEMPLATE/processs_request.md | 5 + .../.github/ISSUE_TEMPLATE/questions.md | 8 ++ .../bigquery-storage/.github/auto-approve.yml | 4 +- .../.github/scripts/close-invalid-link.cjs | 53 ++++++++++ .../.github/scripts/close-unresponsive.cjs | 69 +++++++++++++ .../.github/scripts/remove-response-label.cjs | 33 +++++++ handwritten/bigquery-storage/README.md | 4 +- 12 files changed, 380 insertions(+), 12 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md create mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md create mode 100644 handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs create mode 100644 handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs create mode 100644 handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 9e90d54bfb2..460f67f2b60 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:d920257482ca1cd72978f29f7d28765a9f8c758c21ee0708234db5cf4c5016c2 -# created: 2024-06-12T16:18:41.688792375Z + digest: sha256:a5af6af827a9fffba373151e1453b0498da288024cdd16477900dd42857a42e0 +# created: 2024-09-20T20:26:11.126243246Z diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS index 6d82f1f7467..d982c0a9506 100644 --- a/handwritten/bigquery-storage/.github/CODEOWNERS +++ b/handwritten/bigquery-storage/.github/CODEOWNERS @@ -5,8 +5,5 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# The yoshi-nodejs team is the default owner for nodejs repositories. -* @googleapis/yoshi-nodejs @googleapis/api-bigquery - -# The github automation team is the default owner for the auto-approve file. -.github/auto-approve.yml @googleapis/github-automation +# Unless specified, the jsteam is the default owner for nodejs repositories. +* @googleapis/api-bigquery @googleapis/jsteam \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000000..f11f8e0fabd --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,99 @@ +name: Bug Report +description: Create a report to help us improve +labels: + - bug +body: + - type: markdown + attributes: + value: > + **PLEASE READ**: If you have a support contract with Google, please + create an issue in the [support + console](https://cloud.google.com/support/) instead of filing on GitHub. + This will ensure a timely response. Otherwise, please make sure to + follow the steps below. + - type: checkboxes + attributes: + label: Please make sure you have searched for information in the following + guides. 
+ options: + - label: "Search the issues already opened: + https://github.com/GoogleCloudPlatform/google-cloud-node/issues" + required: true + - label: "Search StackOverflow: + http://stackoverflow.com/questions/tagged/google-cloud-platform+nod\ + e.js" + required: true + - label: "Check our Troubleshooting guide: + https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ + es/troubleshooting" + required: true + - label: "Check our FAQ: + https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ + es/faq" + required: true + - label: "Check our libraries HOW-TO: + https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ + .md" + required: true + - label: "Check out our authentication guide: + https://github.com/googleapis/google-auth-library-nodejs" + required: true + - label: "Check out handwritten samples for many of our APIs: + https://github.com/GoogleCloudPlatform/nodejs-docs-samples" + required: true + - type: textarea + attributes: + label: > + A screenshot that you have tested with "Try this API". + description: > + As our client libraries are mostly autogenerated, we kindly request + that you test whether your issue is with the client library, or with the + API itself. To do so, please search for your API + here: https://developers.google.com/apis-explorer and attempt to + reproduce the issue in the given method. Please include a screenshot of + the response in "Try this API". This response should NOT match the current + behavior you are experiencing. If the behavior is the same, it means + that you are likely experiencing a bug with the API itself. 
In that + case, please submit an issue to the API team, either by submitting an + issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers, or by + submitting an issue in its linked tracker in the .repo-metadata.json + file https://b.corp.google.com/savedsearches/559654 + validations: + required: true + - type: input + attributes: + label: > + Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal + reproduction. + description: > + **Skipping this or providing an invalid link will result in the issue being closed** + validations: + required: true + - type: textarea + attributes: + label: > + A step-by-step description of how to reproduce the issue, based on + the linked reproduction. + description: > + Screenshots can be provided in the issue body below. + placeholder: | + 1. Start the application in development (next dev) + 2. Click X + 3. Y will happen + validations: + required: true + - type: textarea + attributes: + label: A clear and concise description of what the bug is, and what you + expected to happen. + placeholder: Following the steps from the previous section, I expected A to + happen, but I observed B instead + validations: + required: true + + - type: textarea + attributes: + label: A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. 
** + placeholder: 'Documentation here(link) states that B should happen instead of A' + validations: + required: true diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml new file mode 100644 index 00000000000..e78086c752b --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml @@ -0,0 +1,53 @@ +name: Documentation Requests +description: Requests for more information +body: + - type: markdown + attributes: + value: > + Please use this issue type to log documentation requests against the library itself. + These requests should involve documentation on Github (`.md` files), and should relate to the library + itself. If you have questions or documentation requests for an API, please + reach out to the API tracker itself. + + Please submit an issue to the API team, either by submitting an + issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers), or by + submitting an issue in its linked tracker in the .repo-metadata.json + file in the API under packages/* ([example](https://b.corp.google.com/savedsearches/559654)). + You can also submit a request to documentation on cloud.google.com itself with the "Send Feedback" + on the bottom of the page. + + + Please note that documentation requests and questions for specific APIs + will be closed. + - type: checkboxes + attributes: + label: Please make sure you have searched for information in the following + guides. 
+ options: + - label: "Search the issues already opened: + https://github.com/GoogleCloudPlatform/google-cloud-node/issues" + required: true + - label: "Check our Troubleshooting guide: + https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ + es/troubleshooting" + required: true + - label: "Check our FAQ: + https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ + es/faq" + required: true + - label: "Check our libraries HOW-TO: + https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ + .md" + required: true + - label: "Check out our authentication guide: + https://github.com/googleapis/google-auth-library-nodejs" + required: true + - label: "Check out handwritten samples for many of our APIs: + https://github.com/GoogleCloudPlatform/nodejs-docs-samples" + required: true + - type: textarea + attributes: + label: > + Documentation Request + validations: + required: true diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000000..344778ae1f2 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,53 @@ +name: Feature Request +description: Suggest an idea for this library +labels: + - feature request +body: + - type: markdown + attributes: + value: > + **PLEASE READ**: If you have a support contract with Google, please + create an issue in the [support + console](https://cloud.google.com/support/) instead of filing on GitHub. + This will ensure a timely response. Otherwise, please make sure to + follow the steps below. + - type: textarea + attributes: + label: > + A screenshot that you have tested with "Try this API". + description: > + As our client libraries are mostly autogenerated, we kindly request + that you test whether your feature request is with the client library, or with the + API itself. 
To do so, please search for your API + here: https://developers.google.com/apis-explorer and attempt to + reproduce the issue in the given method. Please include a screenshot of + the response in "Try this API". This response should NOT match the current + behavior you are experiencing. If the behavior is the same, it means + that you are likely requesting a feature for the API itself. In that + case, please submit an issue to the API team, either by submitting an + issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers, or by + submitting an issue in its linked tracker in the .repo-metadata.json + file in the API under packages/* ([example](https://b.corp.google.com/savedsearches/559654)) + + Example of library specific issues would be: retry strategies, authentication questions, or issues with typings. + Examples of API issues would include: expanding method parameter types, adding functionality to an API. + validations: + required: true + - type: textarea + attributes: + label: > + What would you like to see in the library? + description: > + Screenshots can be provided in the issue body below. + placeholder: | + 1. Set up authentication like so + 2. Run the program like so + 3. X would be nice to happen + + - type: textarea + attributes: + label: Describe alternatives you've considered + + - type: textarea + attributes: + label: Additional context/notes \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md new file mode 100644 index 00000000000..9f88fc1f3b7 --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md @@ -0,0 +1,5 @@ +--- +name: Process Request +about: Submit a process request to the library. Process requests are any requests related to library infrastructure, including CI/CD, publishing, releasing, etc. 
This issue template should primarily used by internal members. + +--- \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md new file mode 100644 index 00000000000..62c1dd1b93a --- /dev/null +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md @@ -0,0 +1,8 @@ +--- +name: Question +about: If you have a question, please use Discussions + +--- + +If you have a general question that goes beyond the library itself, we encourage you to use [Discussions](https://github.com//discussions) +to engage with fellow community members! diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml index ec51b072dca..7cba0af636c 100644 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ b/handwritten/bigquery-storage/.github/auto-approve.yml @@ -1,4 +1,2 @@ processes: - - "NodeDependency" - - "OwlBotTemplateChangesNode" - - "OwlBotPRsNode" \ No newline at end of file + - "NodeDependency" \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs new file mode 100644 index 00000000000..ba7d51372ac --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs @@ -0,0 +1,53 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +async function closeIssue(github, owner, repo, number) { + await github.rest.issues.createComment({ + owner: owner, + repo: repo, + issue_number: number, + body: 'Issue was opened with an invalid reproduction link. Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)' + }); + await github.rest.issues.update({ + owner: owner, + repo: repo, + issue_number: number, + state: 'closed' + }); +} +module.exports = async ({github, context}) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const number = context.issue.number; + + const issue = await github.rest.issues.get({ + owner: owner, + repo: repo, + issue_number: number, + }); + + const isBugTemplate = issue.data.body.includes('Link to the code that reproduces this issue'); + + if (isBugTemplate) { + try { + const link = issue.data.body.split('\n')[18].match(/(https?:\/\/g?i?s?t?\.?github.com\/.*)/); + const isValidLink = (await fetch(link)).ok; + if (!isValidLink) { + await closeIssue(github, owner, repo, number); + } + } catch (err) { + await closeIssue(github, owner, repo, number); + } + } +}; diff --git a/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs b/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs new file mode 100644 index 00000000000..142dc1265a4 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs @@ -0,0 +1,69 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +function labeledEvent(data) { + return data.event === 'labeled' && data.label.name === 'needs more info'; + } + + const numberOfDaysLimit = 15; + const close_message = `This has been closed since a request for information has \ + not been answered for ${numberOfDaysLimit} days. It can be reopened when the \ + requested information is provided.`; + + module.exports = async ({github, context}) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + + const issues = await github.rest.issues.listForRepo({ + owner: owner, + repo: repo, + labels: 'needs more info', + }); + const numbers = issues.data.map((e) => e.number); + + for (const number of numbers) { + const events = await github.paginate( + github.rest.issues.listEventsForTimeline, + { + owner: owner, + repo: repo, + issue_number: number, + }, + (response) => response.data.filter(labeledEvent) + ); + + const latest_response_label = events[events.length - 1]; + + const created_at = new Date(latest_response_label.created_at); + const now = new Date(); + const diff = now - created_at; + const diffDays = diff / (1000 * 60 * 60 * 24); + + if (diffDays > numberOfDaysLimit) { + await github.rest.issues.update({ + owner: owner, + repo: repo, + issue_number: number, + state: 'closed', + }); + + await github.rest.issues.createComment({ + owner: owner, + repo: repo, + issue_number: number, + body: close_message, + }); + } + } + }; diff --git a/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs 
b/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs new file mode 100644 index 00000000000..887cf349e9d --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs @@ -0,0 +1,33 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +module.exports = async ({ github, context }) => { + const commenter = context.actor; + const issue = await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const author = issue.data.user.login; + const labels = issue.data.labels.map((e) => e.name); + + if (author === commenter && labels.includes('needs more info')) { + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + name: 'needs more info', + }); + } + }; diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index f9f0725b51a..5b2e00f6acb 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -149,7 +149,7 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained]. 1. [Select or create a Cloud Platform project][projects]. 1. [Enable billing for your project][billing]. 1. [Enable the Google BigQuery Storage API][enable_api]. -1. [Set up authentication with a service account][auth] so you can access the +1. 
[Set up authentication][auth] so you can access the API from your local workstation. ### Installing the client library @@ -368,4 +368,4 @@ See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/LI [projects]: https://console.cloud.google.com/project [billing]: https://support.google.com/cloud/answer/6293499#enable-billing [enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerystorage.googleapis.com -[auth]: https://cloud.google.com/docs/authentication/getting-started +[auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local From 01682871b6efbaf2c6bb7f5452f8e2ca6cd24748 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Mon, 23 Sep 2024 13:52:25 -0400 Subject: [PATCH 291/333] feat: add wrapper for reading table data using Storage API (#431) Add support for easily reading Tables using the BigQuery Storage API instead of the BigQuery API. This will provide increased performance and reduced memory usage for most use cases and will allow users to keep using the same interface as they used to use on our main library or fetch data directly via a new veneer on BigQuery Storage Read API --- handwritten/bigquery-storage/package.json | 13 +- handwritten/bigquery-storage/src/index.ts | 7 + .../src/reader/arrow_reader.ts | 101 ++++ .../src/reader/arrow_transform.ts | 176 ++++++ .../src/reader/data_format.ts | 33 ++ .../bigquery-storage/src/reader/index.ts | 35 ++ .../src/reader/read_client.ts | 202 +++++++ .../src/reader/read_session.ts | 149 +++++ .../src/reader/read_stream.ts | 190 +++++++ .../src/reader/table_reader.ts | 136 +++++ .../bigquery-storage/system-test/install.ts | 1 + .../system-test/reader_client_test.ts | 512 ++++++++++++++++++ .../test/reader/arrow_transform.ts | 129 +++++ 13 files changed, 1679 insertions(+), 5 deletions(-) create mode 100644 handwritten/bigquery-storage/src/reader/arrow_reader.ts create mode 100644 handwritten/bigquery-storage/src/reader/arrow_transform.ts create mode 
100644 handwritten/bigquery-storage/src/reader/data_format.ts create mode 100644 handwritten/bigquery-storage/src/reader/index.ts create mode 100644 handwritten/bigquery-storage/src/reader/read_client.ts create mode 100644 handwritten/bigquery-storage/src/reader/read_session.ts create mode 100644 handwritten/bigquery-storage/src/reader/read_stream.ts create mode 100644 handwritten/bigquery-storage/src/reader/table_reader.ts create mode 100644 handwritten/bigquery-storage/system-test/reader_client_test.ts create mode 100644 handwritten/bigquery-storage/test/reader/arrow_transform.ts diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c0c086a5e0c..6b68129f383 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,18 +27,21 @@ "precompile": "gts clean" }, "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "apache-arrow": "^14.0.2", + "core-js": "^3.37.1", "extend": "^3.0.2", - "google-gax": "^4.3.1", - "google-auth-library": "^9.6.3" + "google-auth-library": "^9.6.3", + "google-gax": "^4.3.1" }, "peerDependencies": { "protobufjs": "^7.2.4" }, "devDependencies": { - "@google-cloud/bigquery": "^7.0.0", + "@google-cloud/bigquery": "^7.5.2", "@types/extend": "^3.0.4", "@types/mocha": "^9.0.0", - "@types/node": "^20.0.0", + "@types/node": "^20.16.5", "@types/sinon": "^17.0.0", "@types/uuid": "^9.0.1", "c8": "^9.0.0", @@ -55,7 +58,7 @@ "nise": "6.0.0", "path-to-regexp": "6.3.0", "ts-loader": "^9.0.0", - "typescript": "^5.1.6", + "typescript": "^5.5.3", "uuid": "^9.0.0", "webpack": "^5.0.0", "webpack-cli": "^5.0.0" diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index 7f6d42fdaa5..c095dfe9d57 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -20,6 +20,7 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; import * as v1alpha from 
'./v1alpha'; import * as managedwriter from './managedwriter'; +import * as reader from './reader'; const BigQueryReadClient = v1.BigQueryReadClient; type BigQueryReadClient = v1.BigQueryReadClient; const BigQueryWriteClient = v1.BigQueryWriteClient; @@ -28,6 +29,8 @@ const BigQueryStorageClient = v1beta1.BigQueryStorageClient; type BigQueryStorageClient = v1beta1.BigQueryStorageClient; const WriterClient = managedwriter.WriterClient; type WriterClient = managedwriter.WriterClient; +const ReadClient = reader.ReadClient; +type ReadClient = reader.ReadClient; export { v1, BigQueryReadClient, @@ -37,6 +40,8 @@ export { BigQueryWriteClient, managedwriter, WriterClient, + reader, + ReadClient, }; // For compatibility with JavaScript libraries we need to provide this default export: // tslint:disable-next-line no-default-export @@ -46,6 +51,8 @@ export default { BigQueryWriteClient, managedwriter, WriterClient, + reader, + ReadClient, }; import * as protos from '../protos/protos'; export {protos}; diff --git a/handwritten/bigquery-storage/src/reader/arrow_reader.ts b/handwritten/bigquery-storage/src/reader/arrow_reader.ts new file mode 100644 index 00000000000..01439769aa4 --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/arrow_reader.ts @@ -0,0 +1,101 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {ResourceStream} from '@google-cloud/paginator'; +import {RecordBatch} from 'apache-arrow'; + +import * as protos from '../../protos/protos'; +import {TableReference, ReadClient} from './read_client'; +import {logger} from '../util/logger'; +import { + ArrowRawTransform, + ArrowRecordBatchTransform, + ArrowRecordReaderTransform, +} from './arrow_transform'; +import {ReadSession, GetStreamOptions} from './read_session'; +import {ArrowFormat} from './data_format'; + +type ReadSessionInfo = protos.google.cloud.bigquery.storage.v1.IReadSession; + +/** + * A BigQuery Storage API Reader that can be used to read data + * from BigQuery Tables using the Storage API in Arrow format. + * + * @class + * @memberof reader + */ +export class ArrowTableReader { + private _tableRef: TableReference; + private _session: ReadSession; + + /** + * Creates a new ArrowTableReader instance. Usually created via + * ReadClient.createArrowTableReader(). + * + * @param {ReadClient} readClient - Storage Read Client. + * @param {TableReference} table - target table to read data from. + */ + constructor(readClient: ReadClient, tableRef: TableReference) { + this._tableRef = tableRef; + this._session = new ReadSession(readClient, tableRef, ArrowFormat); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private trace(msg: string, ...otherArgs: any[]) { + logger( + 'arrow_table_reader', + `[table: ${this._tableRef.tableId}]`, + msg, + ...otherArgs + ); + } + + getSessionInfo(): ReadSessionInfo | undefined | null { + return this._session.getSessionInfo(); + } + + /** + * Get a byte stream of Arrow Record Batch. + * + * @param {GetStreamOptions} options + */ + async getStream( + options?: GetStreamOptions + ): Promise> { + this.trace('getStream', options); + const stream = await this._session.getStream(options); + return stream.pipe(new ArrowRawTransform()) as ResourceStream; + } + + /** + * Get a stream of Arrow RecordBatch objects. 
+ * + * @param {GetStreamOptions} options + */ + async getRecordBatchStream( + options?: GetStreamOptions + ): Promise> { + this.trace('getRecordBatchStream', options); + const stream = await this._session.getStream(options); + const info = this._session.getSessionInfo(); + return stream + .pipe(new ArrowRawTransform()) + .pipe(new ArrowRecordReaderTransform(info!)) + .pipe(new ArrowRecordBatchTransform()) as ResourceStream; + } + + close() { + this._session.close(); + } +} diff --git a/handwritten/bigquery-storage/src/reader/arrow_transform.ts b/handwritten/bigquery-storage/src/reader/arrow_transform.ts new file mode 100644 index 00000000000..ca9e8b4447d --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/arrow_transform.ts @@ -0,0 +1,176 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {Transform, TransformCallback} from 'stream'; +import { + RecordBatchReader, + RecordBatch, + RecordBatchStreamReader, + Vector, +} from 'apache-arrow'; +import * as protos from '../../protos/protos'; + +type ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.IReadRowsResponse; +type ReadSession = protos.google.cloud.bigquery.storage.v1.IReadSession; + +interface TableCell { + v?: any; +} +interface TableRow { + f?: Array; +} + +/** + * ArrowRawTransform implements a node stream Transform that reads + * ReadRowsResponse from BigQuery Storage Read API and convert + * a raw Arrow Record Batch. 
+ */ +export class ArrowRawTransform extends Transform { + constructor() { + super({ + readableObjectMode: false, + writableObjectMode: true, + }); + } + + _transform( + response: ReadRowsResponse, + _: BufferEncoding, + callback: TransformCallback + ): void { + if ( + !( + response.arrowRecordBatch && + response.arrowRecordBatch.serializedRecordBatch + ) + ) { + callback(null); + return; + } + callback(null, response.arrowRecordBatch?.serializedRecordBatch); + } +} + +/** + * ArrowRecordReaderTransform implements a node stream Transform that reads + * a byte stream of raw Arrow Record Batch and convert to a stream of Arrow + * RecordBatchStreamReader. + */ +export class ArrowRecordReaderTransform extends Transform { + private session: ReadSession; + + constructor(session: ReadSession) { + super({ + objectMode: true, + }); + this.session = session; + } + + _transform( + serializedRecordBatch: Uint8Array, + _: BufferEncoding, + callback: TransformCallback + ): void { + const buf = Buffer.concat([ + this.session.arrowSchema?.serializedSchema as Uint8Array, + serializedRecordBatch, + ]); + const reader = RecordBatchReader.from(buf); + callback(null, reader); + } +} + +/** + * ArrowRecordBatchTransform implements a node stream Transform that reads + * a RecordBatchStreamReader and convert a stream of Arrow RecordBatch. + */ +export class ArrowRecordBatchTransform extends Transform { + constructor() { + super({ + objectMode: true, + }); + } + + _transform( + reader: RecordBatchStreamReader, + _: BufferEncoding, + callback: TransformCallback + ): void { + const batches = reader.readAll(); + for (const row of batches) { + this.push(row); + } + callback(null); + } +} + +/** + * ArrowRecordBatchTableRowTransform implements a node stream Transform that reads + * an Arrow RecordBatch and convert a stream of BigQuery TableRow. 
+ */ +export class ArrowRecordBatchTableRowTransform extends Transform { + constructor() { + super({ + objectMode: true, + }); + } + + _transform( + batch: RecordBatch, + _: BufferEncoding, + callback: TransformCallback + ): void { + const rows = new Array(batch.numRows); + for (let i = 0; i < batch.numRows; i++) { + rows[i] = { + f: new Array(batch.numCols), + }; + } + for (let j = 0; j < batch.numCols; j++) { + const column = batch.selectAt([j]); + const columnName = column.schema.fields[0].name; + for (let i = 0; i < batch.numRows; i++) { + const fieldData = column.get(i); + const fieldValue = fieldData?.toJSON()[columnName]; + rows[i].f[j] = { + v: convertArrowValue(fieldValue), + }; + } + } + for (let i = 0; i < batch.numRows; i++) { + this.push(rows[i]); + } + callback(null); + } +} + +function convertArrowValue(fieldValue: any): any { + if (typeof fieldValue === 'object') { + if (fieldValue instanceof Vector) { + const arr = fieldValue.toJSON(); + return arr.map((v: any) => { + return {v: convertArrowValue(v)}; + }); + } + const tableRow: TableRow = {f: []}; + Object.keys(fieldValue).forEach(key => { + tableRow.f?.push({ + v: convertArrowValue(fieldValue[key]), + }); + }); + return tableRow; + } + return fieldValue; +} diff --git a/handwritten/bigquery-storage/src/reader/data_format.ts b/handwritten/bigquery-storage/src/reader/data_format.ts new file mode 100644 index 00000000000..d599c471a36 --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/data_format.ts @@ -0,0 +1,33 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as protos from '../../protos/protos'; + +export type DataFormat = + protos.google.cloud.bigquery.storage.v1.IReadSession['dataFormat']; +const DataFormat = protos.google.cloud.bigquery.storage.v1.DataFormat; + +/** + * Return data in Apache Arrow format. + * + * @memberof reader + */ +export const ArrowFormat: DataFormat = 'ARROW'; + +/** + * Return data in Apache Avro format. + * + * @memberof reader + */ +export const AvroFormat: DataFormat = 'AVRO'; diff --git a/handwritten/bigquery-storage/src/reader/index.ts b/handwritten/bigquery-storage/src/reader/index.ts new file mode 100644 index 00000000000..280011fc023 --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/index.ts @@ -0,0 +1,35 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * Package reader provides an EXPERIMENTAL thick client around the + * BigQuery storage API's BigQueryReadClient. 
+ * More information about this new read client may also be found in + * the public documentation: https://cloud.google.com/bigquery/docs/read-api + * + * It is EXPERIMENTAL and subject to change or removal without notice. This is primarily to signal that this + * package may still make breaking changes to existing methods and functionality. + * + * @namespace reader + */ + +export {ReadClient} from './read_client'; +export {TableReader} from './table_reader'; +export {ArrowTableReader} from './arrow_reader'; +export {ReadStream} from './read_stream'; +export {DataFormat, ArrowFormat, AvroFormat} from './data_format'; +export {setLogFunction} from '../util/logger'; + +// polyfill array.at for Node < 14. Remove after Node 14 is deprecated. +import 'core-js/full/array/at'; diff --git a/handwritten/bigquery-storage/src/reader/read_client.ts b/handwritten/bigquery-storage/src/reader/read_client.ts new file mode 100644 index 00000000000..5b89ae1292f --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/read_client.ts @@ -0,0 +1,202 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as gax from 'google-gax'; +import type {CallOptions, ClientOptions} from 'google-gax'; + +import * as protos from '../../protos/protos'; +import {BigQueryReadClient} from '../v1'; +import {ReadStream} from './read_stream'; +import {TableReader} from './table_reader'; +import {ArrowTableReader} from './arrow_reader'; +import {DataFormat} from './data_format'; + +type CreateReadSessionRequest = + protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest; +type ReadSession = protos.google.cloud.bigquery.storage.v1.IReadSession; + +export type TableReference = { + /** + * Required. The ID of the dataset containing this table. + */ + datasetId?: string; + /** + * Required. The ID of the project containing this table. + */ + projectId?: string; + /** + * Required. The ID of the table. The ID can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table ID with a partition decorator, such as `sample_table$20190123`. + */ + tableId?: string; +}; + +/** + * BigQuery Read API Client. + * The Read API can be used to read data to BigQuery. + * + * This class provides the ability to make remote calls to the backing service through method + * calls that map to API methods. 
+ * + * For supplementary information about the Read API, see: + * https://cloud.google.com/bigquery/docs/read-api + * + * @class + * @memberof reader + */ +export class ReadClient { + private _client: BigQueryReadClient; + + constructor(opts?: ClientOptions) { + const baseOptions = { + 'grpc.keepalive_time_ms': 30 * 1000, + 'grpc.keepalive_timeout_ms': 10 * 1000, + 'grpc.use_local_subchannel_pool': 0, + }; + this._client = new BigQueryReadClient({ + ...baseOptions, + ...opts, + }); + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves when auth is complete. + */ + initialize = async (): Promise => { + await this._client.initialize(); + }; + + getClient = (): BigQueryReadClient => { + return this._client; + }; + + setClient = (client: BigQueryReadClient): void => { + this._client = client; + }; + + /** + * Creates a new read session. A read session divides the contents of a + * BigQuery table into one or more streams, which can then be used to read + * data from the table. The read session also specifies properties of the + * data to be read, such as a list of columns or a push-down filter describing + * the rows to be returned. + * + * A particular row can be read by at most one stream. When the caller has + * reached the end of each stream in the session, then all the data in the + * table has been read. + * + * Data is assigned to each stream such that roughly the same number of + * rows can be read from each stream. 
Because the server-side unit for + * assigning data is collections of rows, the API does not guarantee that + * each stream will return the same number or rows. Additionally, the + * limits are enforced based on the number of pre-filtered rows, so some + * filters can lead to lopsided assignments. + * + * Read sessions automatically expire 6 hours after they are created and do + * not require manual clean-up by the caller. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Parent table that all the streams should belong to, in the form + * of `projects/{project}`. + * @param {string} request.table + * Parent table that all the streams should belong to, in the form + * of `projects/{project}/datasets/{dataset}/tables/{table}`. + * @returns {Promise}} - The promise which resolves to the streamId. + */ + async createReadSession(request: { + parent: string; + table: string; + dataFormat: DataFormat; + selectedFields?: string[]; + }): Promise { + await this.initialize(); + const {table, parent, dataFormat, selectedFields} = request; + const maxWorkerCount = 1; + const maxStreamCount = 0; + const createReq: CreateReadSessionRequest = { + parent, + readSession: { + table, + dataFormat, + readOptions: { + selectedFields: selectedFields, + }, + }, + preferredMinStreamCount: maxWorkerCount, + maxStreamCount: maxStreamCount, + }; + const [response] = await this._client.createReadSession(createReq); + if (typeof [response] === undefined) { + throw new gax.GoogleError(`${response}`); + } + return response; + } + + /** + * Creates a ReadStream to the given stream name and ReadSession. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.streamName + * Required. The id/name of read stream to read from. + * @param {string} request.session + * Required. Reference to the ReadSession. See `createReadSession`. 
+ * @returns {Promise}} - The promise which resolves to the `ReadStream`. + */ + async createReadStream( + request: { + streamName: string; + session: ReadSession; + }, + options?: CallOptions + ): Promise { + await this.initialize(); + const {streamName, session} = request; + try { + const stream = new ReadStream(streamName, session, this, options); + return stream; + } catch (err) { + throw new Error('read stream connection failed:' + err); + } + } + + async createTableReader(params: { + table: TableReference; + }): Promise { + await this.initialize(); + const reader = new TableReader(this, params.table); + return reader; + } + + async createArrowTableReader(params: { + table: TableReference; + }): Promise { + await this.initialize(); + const reader = new ArrowTableReader(this, params.table); + return reader; + } + + close() { + this._client.close(); + } +} diff --git a/handwritten/bigquery-storage/src/reader/read_session.ts b/handwritten/bigquery-storage/src/reader/read_session.ts new file mode 100644 index 00000000000..c08ce9b5fff --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/read_session.ts @@ -0,0 +1,149 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {ResourceStream} from '@google-cloud/paginator'; +import {Readable} from 'stream'; + +import {ReadStream} from './read_stream'; +import * as protos from '../../protos/protos'; +import {TableReference, ReadClient} from './read_client'; +import {DataFormat} from './data_format'; +import {logger} from '../util/logger'; + +type ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.IReadRowsResponse; +type ReadSessionInfo = protos.google.cloud.bigquery.storage.v1.IReadSession; +const ReadSessionInfo = protos.google.cloud.bigquery.storage.v1.ReadSession; + +export type GetStreamOptions = { + /** + * Row limit of the table. + */ + maxResults?: number; + /** + * Subset of fields to return, supports select into sub fields. Example: selected_fields = "a,e.d.f"; + */ + selectedFields?: string; +}; + +/** + * A ReadSession represents a Read Session from the BigQuery + * Storage Read API. + * + * Read more on:https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#readsession + * + * @class + * @memberof reader + */ +export class ReadSession { + private _info: ReadSessionInfo | null; + private _tableRef: TableReference; + private _format: DataFormat; + private _readStreams: ReadStream[]; + private _readClient: ReadClient; + + constructor( + readClient: ReadClient, + tableRef: TableReference, + format: DataFormat + ) { + this._info = null; + this._format = format; + this._tableRef = tableRef; + this._readClient = readClient; + this._readStreams = []; + } + + getSessionInfo(): ReadSessionInfo | null { + return this._info; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private trace(msg: string, ...otherArgs: any[]) { + logger('session', `[session: ${this._info?.name}]`, msg, ...otherArgs); + } + + private async getOrCreateSession( + options?: GetStreamOptions + ): Promise { + if (this._info) { + return this._info; + } + const session = await this._readClient.createReadSession({ + parent: 
`projects/${this._tableRef.projectId}`, + table: `projects/${this._tableRef.projectId}/datasets/${this._tableRef.datasetId}/tables/${this._tableRef.tableId}`, + dataFormat: this._format, + selectedFields: options?.selectedFields?.split(','), + }); + this.trace( + 'session created', + session.name, + session.streams, + session.estimatedRowCount + ); + this._info = session; + + this._readStreams = []; + for (const readStream of session.streams || []) { + const r = await this._readClient.createReadStream( + { + streamName: readStream.name!, + session, + }, + options + ); + this._readStreams.push(r); + } + return session; + } + + /** + * Get a merged stream of ReadRowsResponse from all ReadStream + * under this ReadSession. + * + * @param {GetStreamOptions} options + */ + async getStream( + options?: GetStreamOptions + ): Promise> { + this.trace('getStream', options); + + await this.getOrCreateSession(options); + + const mergedStream = mergeStreams( + this._readStreams.map(r => { + const stream = r.getRowsStream(); + return stream; + }) + ); + const joined = Readable.from(mergedStream); + this.trace('joined streams', joined); + const stream = joined as ResourceStream; + return stream; + } + + close() { + this._readStreams.forEach(rs => { + rs.close(); + }); + } +} + +async function* mergeStreams(readables: Readable[]) { + for (const readable of readables) { + for await (const chunk of readable) { + yield chunk; + } + } +} diff --git a/handwritten/bigquery-storage/src/reader/read_stream.ts b/handwritten/bigquery-storage/src/reader/read_stream.ts new file mode 100644 index 00000000000..7e528e4e70f --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/read_stream.ts @@ -0,0 +1,190 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as gax from 'google-gax'; +import * as protos from '../../protos/protos'; + +import {ReadClient} from './read_client'; +import {logger} from '../util/logger'; +import {Readable, Transform} from 'stream'; + +type ReadSession = protos.google.cloud.bigquery.storage.v1.IReadSession; +type ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.IReadRowsResponse; + +export type RemoveListener = { + off: () => void; +}; + +/** + * ReadStream is responsible for reading data from a GRPC read stream + * connection against the Storage Read API readRows method. + * + * @class + * @extends EventEmitter + * @memberof reader + */ +export class ReadStream { + private _streamName: string; + private _offset: number; + private _readClient: ReadClient; + private _session: ReadSession; + private _readStream: Readable | null; + private _connection: gax.CancellableStream | null; + private _callOptions?: gax.CallOptions; + + constructor( + streamName: string, + session: ReadSession, + readClient: ReadClient, + options?: gax.CallOptions + ) { + this._streamName = streamName; + this._session = session; + this._offset = 0; + this._readClient = readClient; + this._connection = null; + this._readStream = null; + this._callOptions = options; + this.open(); + } + + open() { + if (this.isOpen()) { + this.close(); + } + const client = this._readClient.getClient(); + const connection = client.readRows( + { + readStream: this._streamName, + offset: this._offset, + }, + this._callOptions + ); + this._connection = connection; + const passthrough = new Transform({ + 
objectMode: true, + transform: (response: ReadRowsResponse, _, callback) => { + this.processReadRowsResponse(response); + callback(null, response); + }, + }); + this._readStream = this._connection.pipe(passthrough); + this._connection.on('error', this.handleError); + this._connection.on('close', () => { + this.trace('connection closed'); + }); + this._connection.on('pause', () => { + this.trace('connection paused'); + }); + this._connection.on('resume', async () => { + this.trace('connection resumed'); + }); + this._connection.on('end', () => { + this.trace('connection ended'); + }); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private trace(msg: string, ...otherArgs: any[]) { + logger( + 'read_stream', + `[streamName: ${this._streamName}]`, + msg, + ...otherArgs + ); + } + + private handleError = (err: gax.GoogleError) => { + this.trace('on error', err, JSON.stringify(err)); + if (this.isRetryableError(err)) { + this.reconnect(); + return; + } + this._readStream?.destroy(err); + this._readStream = null; + }; + + private isRetryableError(err?: gax.GoogleError | null): boolean { + if (!err) { + return false; + } + const reconnectionErrorCodes = [ + gax.Status.ABORTED, + gax.Status.CANCELLED, + gax.Status.DEADLINE_EXCEEDED, + gax.Status.INTERNAL, + gax.Status.UNAVAILABLE, + ]; + return !!err.code && reconnectionErrorCodes.includes(err.code); + } + + private processReadRowsResponse(response: ReadRowsResponse) { + if (!response.rowCount) { + return; + } + const rowCount = parseInt(response.rowCount as string, 10); + this._offset += rowCount; + } + + /** + * Get the name of the read stream associated with this connection. + */ + getStreamName = (): string => { + return this._streamName; + }; + + getReadSession(): ReadSession { + return this._session; + } + + getRowsStream(): Readable { + return this._readStream!; + } + + /** + * Check if connection is open and ready to read data. 
+ */ + isOpen(): boolean { + if (this._connection) { + return !(this._connection.destroyed || this._connection.closed); + } + return false; + } + + /** + * Reconnect and re-open readRows channel. + */ + reconnect() { + this.trace('reconnect called'); + this.close(); + this.open(); + } + + /** + * Close the read stream connection. + */ + close() { + if (this._connection) { + this._connection.end(); + this._connection.removeAllListeners(); + this._connection.destroy(); + this._connection = null; + } + if (this._readStream) { + this._readStream.destroy(); + this._readStream = null; + } + } +} diff --git a/handwritten/bigquery-storage/src/reader/table_reader.ts b/handwritten/bigquery-storage/src/reader/table_reader.ts new file mode 100644 index 00000000000..8ccef34aac2 --- /dev/null +++ b/handwritten/bigquery-storage/src/reader/table_reader.ts @@ -0,0 +1,136 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {ResourceStream} from '@google-cloud/paginator'; + +import * as protos from '../../protos/protos'; +import {TableReference, ReadClient} from './read_client'; +import {logger} from '../util/logger'; +import {ArrowRecordBatchTableRowTransform} from './arrow_transform'; +import {ArrowTableReader} from './arrow_reader'; + +type ReadSessionInfo = protos.google.cloud.bigquery.storage.v1.IReadSession; + +interface TableCell { + v?: any; +} +interface TableRow { + /** + * Represents a single row in the result set, consisting of one or more fields. + */ + f?: Array; +} +interface TableDataList { + /** + * Rows of results. + */ + rows?: Array; + /** + * Total rows of the entire table. In order to show default value 0 we have to present it as string. + */ + totalRows?: string; +} + +type GetRowsOptions = { + /** + * Row limit of the table. + */ + maxResults?: number; + /** + * Subset of fields to return, supports select into sub fields. Example: selected_fields = "a,e.d.f"; + */ + selectedFields?: string; +}; +type RowsResponse = [any[], ReadSessionInfo | null, TableDataList]; + +/** + * A BigQuery Storage API Reader that can be used to reader data into BigQuery Table + * using the Storage API. + * + * @class + * @memberof reader + */ +export class TableReader { + private _arrowReader: ArrowTableReader; + private _tableRef: TableReference; + + /** + * Creates a new Reader instance. + * + * @param {Object} params - The parameters for the JSONWriter. + * @param {TableReference} params.table - The stream connection + * to the BigQuery streaming insert operation. 
+ */ + constructor(readClient: ReadClient, tableRef: TableReference) { + this._tableRef = tableRef; + this._arrowReader = new ArrowTableReader(readClient, tableRef); + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private trace(msg: string, ...otherArgs: any[]) { + logger( + 'table_reader', + `[table: ${this._tableRef.tableId}]`, + msg, + ...otherArgs + ); + } + + getSessionInfo(): ReadSessionInfo | undefined | null { + return this._arrowReader.getSessionInfo(); + } + + async getRowStream( + options?: GetRowsOptions + ): Promise> { + this.trace('getRowStream', options); + const stream = await this._arrowReader.getRecordBatchStream(options); + return stream.pipe( + new ArrowRecordBatchTableRowTransform() + ) as ResourceStream; + } + + /** + * Retrieves table data as rows in same format + * as tabledata.list BigQuery v2 API. + * Extra parameters returned contain Storage Read API specific information + * like ReadSession and totalRows count. + * + * @param {options} GetRowsOptions + */ + async getRows(options?: GetRowsOptions): Promise { + this.trace('getRows', options); + const stream = await this.getRowStream(options); + const session = this.getSessionInfo(); + return new Promise((resolve, reject) => { + const rows: TableRow[] = []; + stream.on('data', (data: TableRow) => { + rows.push(data); + }); + stream.on('error', err => { + this.trace('reject called on joined stream', err); + reject(err); + }); + stream.on('end', () => { + this.trace('resolve called on joined stream'); + const totalRows = `${session?.estimatedRowCount ?? 0}`; + resolve([rows, session ?? 
null, {rows, totalRows}]); + }); + }); + } + + close() { + this._arrowReader.close(); + } +} diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 83b83f332c3..3785fd2f6f5 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -27,6 +27,7 @@ describe('📦 pack-n-play test', () => { packageDir: process.cwd(), sample: { description: 'TypeScript user can use the type definitions', + devDependencies: ['@types/web'], ts: readFileSync( './system-test/fixtures/sample/src/index.ts' ).toString(), diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts new file mode 100644 index 00000000000..82fd91a73b0 --- /dev/null +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -0,0 +1,512 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +import * as gax from 'google-gax'; +import * as uuid from 'uuid'; +import * as sinon from 'sinon'; +import {BigQuery, TableRow, TableSchema} from '@google-cloud/bigquery'; +import * as protos from '../protos/protos'; +import * as protobuf from 'protobufjs'; +import {ClientOptions} from 'google-gax'; +import * as customerRecordProtoJson from '../samples/customer_record.json'; +import * as bigquerystorage from '../src'; +import * as reader from '../src/reader'; +import {RecordBatch, Table, tableFromIPC} from 'apache-arrow'; + +type ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.IReadRowsResponse; +const {ReadClient, ArrowFormat, AvroFormat} = reader; + +const sandbox = sinon.createSandbox(); +afterEach(() => sandbox.restore()); + +if (process.env.NODE_ENV === 'DEBUG') { + reader.setLogFunction(console.log); +} + +const GCLOUD_TESTS_PREFIX = 'nodejs_bqstorage_system_test'; +const bigquery = new BigQuery(); +const generateUuid = () => + `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); +const datasetId = generateUuid(); + +const sleep = (ms: number) => + new Promise(resolve => { + setTimeout(resolve, ms); + }); + +const root = protobuf.Root.fromJSON(customerRecordProtoJson); +if (!root) { + throw Error('Proto must not be undefined'); +} + +describe('reader.ReaderClient', () => { + let projectId: string; + let parent: string; + let tableRef: string; + let tableId: string; + let bqReadClient: bigquerystorage.BigQueryReadClient; + let clientOptions: ClientOptions; + const schema: TableSchema = { + fields: [ + { + name: 'name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'row_num', + type: 'INTEGER', + mode: 'REQUIRED', + }, + ], + }; + + before(async () => { + await deleteDatasets(); + + await bigquery.createDataset(datasetId); + }); + + beforeEach(async () => { + tableId = generateUuid(); + + const [table] = await bigquery + .dataset(datasetId) + 
.createTable(tableId, {schema}); + + projectId = table.metadata.tableReference.projectId; + + parent = `projects/${projectId}`; + tableRef = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; + + await bigquery + .dataset(datasetId) + .table(tableId) + .insert([ + {name: 'Ada Lovelace', row_num: 1}, + {name: 'Alan Turing', row_num: 2}, + {name: 'Bell', row_num: 3}, + ]); + }); + + after(async () => { + await bigquery.dataset(datasetId).delete({force: true}).catch(console.warn); + }); + + beforeEach(async () => { + clientOptions = { + projectId: projectId, + 'grpc.keepalive_time_ms': 30 * 1000, + 'grpc.keepalive_timeout_ms': 10 * 1000, + }; + bqReadClient = new bigquerystorage.BigQueryReadClient(clientOptions); + }); + + afterEach(async () => { + await bqReadClient.close(); + }); + + describe('Common methods', () => { + it('should create a client without arguments', () => { + const client = new ReadClient(); + assert(client.getClient()); + }); + + it('should create a client with arguments: parent, client, opts', async () => { + const client = new ReadClient(clientOptions); + assert(client.getClient()); + const clientId = await client.getClient().getProjectId(); + assert.strictEqual(clientId, clientOptions.projectId); + }); + }); + + describe('Read', () => { + it('should invoke createReadSession and createReadStream without errors', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const session = await client.createReadSession({ + parent, + table: tableRef, + dataFormat: ArrowFormat, + }); + + assert.equal(session.dataFormat, ArrowFormat); + assert.notEqual(session.streams, null); + assert.equal(session.streams?.length, 1); + + const readStream = session.streams![0]; + const stream = await client.createReadStream({ + session, + streamName: readStream.name!, + }); + const rowStream = stream.getRowsStream(); + + const responses: ReadRowsResponse[] = []; + await new 
Promise((resolve, reject) => { + rowStream.on('data', (data: ReadRowsResponse) => { + responses.push(data); + }); + rowStream.on('error', reject); + rowStream.on('end', () => { + resolve(null); + }); + }); + + assert.equal(responses.length, 1); + + const res = responses[0]; + assert.equal(stream['_offset'], res.rowCount); + stream.close(); + } finally { + client.close(); + } + }); + }); + + describe('ArrowTableReader', () => { + it('should allow to read a table as an Arrow byte stream', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createArrowTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + + const rawStream = await reader.getStream(); + + const session = reader.getSessionInfo(); + assert.notEqual(session, null); + assert.equal(session?.dataFormat, ArrowFormat); + + const content: Buffer = await new Promise((resolve, reject) => { + let serializedSchema: string | Uint8Array = ''; + if (session?.arrowSchema?.serializedSchema) { + serializedSchema = session?.arrowSchema?.serializedSchema; + } + let buf = Buffer.from(serializedSchema); + rawStream.on('data', (data: Uint8Array) => { + buf = Buffer.concat([buf, data]); + }); + rawStream.on('error', reject); + rawStream.on('end', () => { + resolve(buf); + }); + }); + const table = await tableFromIPC(content); + + assert.equal(table.numRows, 3); + assert.equal(table.numCols, 2); + + reader.close(); + } finally { + client.close(); + } + }); + + it('should allow to read a table as a stream of Arrow Record Batches', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createArrowTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + + const recordBatchStream = await reader.getRecordBatchStream(); + + const session = reader.getSessionInfo(); + assert.notEqual(session, null); + 
assert.equal(session?.dataFormat, ArrowFormat); + + const batches: RecordBatch[] = []; + for await (const batch of recordBatchStream) { + batches.push(batch); + } + const table = new Table(batches); + + assert.equal(table.numRows, 3); + assert.equal(table.numCols, 2); + + reader.close(); + } finally { + client.close(); + } + }); + }); + + describe('TableReader', () => { + it('should allow to read a table as a stream', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + + const rowStream = await reader.getRowStream(); + const rows: TableRow[] = []; + await new Promise((resolve, reject) => { + rowStream.on('data', (data: TableRow) => { + rows.push(data); + }); + rowStream.on('error', reject); + rowStream.on('end', () => { + resolve(null); + }); + }); + + const session = reader.getSessionInfo(); + assert.notEqual(session, null); + assert.equal(session?.dataFormat, ArrowFormat); + + assert.equal(rows.length, 3); + + reader.close(); + } finally { + client.close(); + } + }); + + it('should allow to read a table as tabledata.list RowsResponse', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + + const [rows, session, response] = await reader.getRows(); + + assert.notEqual(session, null); + assert.equal(session?.dataFormat, ArrowFormat); + + assert.notEqual(response.totalRows, null); // estimated row count + assert.equal(response.rows?.length, 3); + + assert.equal(rows.length, 3); + + reader.close(); + } finally { + client.close(); + } + }); + + it('should allow to read a table with long running query', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + 
+ try { + const genTableId = generateUuid(); + await bigquery.query( + `CREATE TABLE ${projectId}.${datasetId}.${genTableId} AS SELECT num FROM UNNEST(GENERATE_ARRAY(1,1000000)) as num` + ); + const reader = await client.createTableReader({ + table: { + projectId, + datasetId, + tableId: genTableId, + }, + }); + + const [rows, session, response] = await reader.getRows(); + + assert.notEqual(session, null); + assert.equal(session?.dataFormat, ArrowFormat); + + assert.notEqual(response.totalRows, null); // estimated row count + assert.equal(response.rows?.length, 1000000); + + assert.equal(rows.length, 1000000); + + reader.close(); + } finally { + client.close(); + } + }).timeout(30 * 1000); + }); + + describe('Error Scenarios', () => { + it('send request with mismatched selected fields', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + + let foundError: gax.GoogleError | null = null; + try { + const rowStream = await reader.getRowStream({ + selectedFields: 'wrong_field', + }); + const rows: TableRow[] = []; + for await (const data of rowStream) { + rows.push(data); + } + } catch (err) { + assert.notEqual(err, null); + foundError = err as gax.GoogleError; + } + + assert.notEqual(foundError, null); + assert.equal(foundError?.code, gax.Status.INVALID_ARGUMENT); + assert.equal( + foundError?.message.includes( + 'request failed: The following selected fields do not exist in the table schema: wrong_field' + ), + true + ); + + reader.close(); + } finally { + client.close(); + } + }); + + it('should trigger reconnection when intermitent error happens', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const reader = await client.createTableReader({ + table: { + projectId, + datasetId, + tableId, + }, + }); + await 
reader.getRowStream(); + + // access private stream connection + const stream = reader['_arrowReader']['_session']['_readStreams'][0]; + let reconnectedCalled = false; + sandbox.stub(stream, 'reconnect').callsFake(() => { + reconnectedCalled = true; + }); + const conn = stream['_connection'] as gax.CancellableStream; // private method + + const gerr = new gax.GoogleError('aborted'); + gerr.code = gax.Status.ABORTED; + conn.emit('error', gerr); + conn.emit('close'); + + assert.equal(reconnectedCalled, true); + } finally { + client.close(); + } + }); + }); + + describe('close', () => { + it('should invoke close without errors', async () => { + bqReadClient.initialize(); + const client = new ReadClient(); + client.setClient(bqReadClient); + + try { + const session = await client.createReadSession({ + parent: `projects/${projectId}`, + table: `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`, + dataFormat: AvroFormat, + }); + + assert.equal(session.dataFormat, AvroFormat); + assert.notEqual(session.streams, null); + assert.notEqual(session.streams?.length, 0); + + const readStream = session.streams![0]; + const connection = await client.createReadStream({ + session, + streamName: readStream.name!, + }); + await sleep(100); + + const internalConn = connection['_connection']!; + + connection.close(); + assert.strictEqual(internalConn.destroyed, true); + + client.close(); + } finally { + client.close(); + } + }); + }); + + // Only delete a resource if it is older than 24 hours. That will prevent + // collisions with parallel CI test runs. 
+ function isResourceStale(creationTime: number) { + const oneDayMs = 86400000; + const now = new Date(); + const created = new Date(creationTime); + return now.getTime() - created.getTime() >= oneDayMs; + } + + async function deleteDatasets() { + let [datasets] = await bigquery.getDatasets(); + datasets = datasets.filter(dataset => + dataset.id?.includes(GCLOUD_TESTS_PREFIX) + ); + + for (const dataset of datasets) { + const [metadata] = await dataset.getMetadata(); + const creationTime = Number(metadata.creationTime); + if (isResourceStale(creationTime)) { + try { + await dataset.delete({force: true}); + } catch (e) { + console.log(`dataset(${dataset.id}).delete() failed`); + console.log(e); + } + } + } + } +}); diff --git a/handwritten/bigquery-storage/test/reader/arrow_transform.ts b/handwritten/bigquery-storage/test/reader/arrow_transform.ts new file mode 100644 index 00000000000..e8081b40acd --- /dev/null +++ b/handwritten/bigquery-storage/test/reader/arrow_transform.ts @@ -0,0 +1,129 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import {describe, it} from 'mocha'; +import * as protos from '../../protos/protos'; +import {RecordBatchStreamWriter, tableFromArrays} from 'apache-arrow'; +import {Readable} from 'stream'; +import { + ArrowRawTransform, + ArrowRecordBatchTableRowTransform, + ArrowRecordBatchTransform, + ArrowRecordReaderTransform, +} from '../../src/reader/arrow_transform'; +import {BigQuery} from '@google-cloud/bigquery'; +import bigquery from '@google-cloud/bigquery/build/src/types'; + +type ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.IReadRowsResponse; +const ReadRowsResponse = + protos.google.cloud.bigquery.storage.v1.ReadRowsResponse; + +describe('Arrow Transform', () => { + it('Pipeline with all transforms', async () => { + const schema: bigquery.ITableSchema = { + fields: [ + {name: 'name', type: 'STRING'}, + {name: 'row', type: 'INTEGER'}, + {name: 'arr', type: 'INTEGER', mode: 'REPEATED'}, + { + name: 'rec', + type: 'RECORD', + fields: [ + {name: 'key', type: 'STRING'}, + {name: 'value', type: 'STRING'}, + ], + }, + { + name: 'recs', + type: 'RECORD', + mode: 'REPEATED', + fields: [{name: 'num', type: 'INTEGER'}], + }, + ], + }; + const table = tableFromArrays({ + name: ['Ada Lovelace', 'Alan Turing', 'Bell'], + row: [1, 2, 3], + arr: [ + [10, 20], + [20, 30], + [30, 40], + ], + rec: [ + {key: 'foo', value: 'bar'}, + {key: 'test', value: 'baz'}, + {key: 'a key', value: 'a value'}, + ], + recs: [ + [{num: 10}, {num: 20}], + [{num: 20}, {num: 30}], + [{num: 30}, {num: 40}], + ], + }); + const writer = RecordBatchStreamWriter.writeAll(table); + const serializedRecordBatch = writer.toUint8Array(true); + const serializedSchema = Uint8Array.from([]); + const response: ReadRowsResponse = { + arrowSchema: { + serializedSchema, + }, + arrowRecordBatch: { + serializedRecordBatch, + rowCount: table.numRows, + }, + }; + + const pipeline = Readable.from([response]) + .pipe(new ArrowRawTransform()) + .pipe(new 
ArrowRecordReaderTransform({arrowSchema: {serializedSchema}})) + .pipe(new ArrowRecordBatchTransform()) + .pipe(new ArrowRecordBatchTableRowTransform()); + + const consumeRows = new Promise(resolve => { + const rows: any[] = []; + pipeline + .on('data', data => rows.push(data)) + .on('end', () => resolve(rows)); + }); + const tableRows = await consumeRows; + const rows = BigQuery.mergeSchemaWithRows_(schema, tableRows, { + wrapIntegers: false, + }); + assert.deepStrictEqual(rows, [ + { + name: 'Ada Lovelace', + row: 1, + arr: [10, 20], + rec: {key: 'foo', value: 'bar'}, + recs: [{num: 10}, {num: 20}], + }, + { + name: 'Alan Turing', + row: 2, + arr: [20, 30], + rec: {key: 'test', value: 'baz'}, + recs: [{num: 20}, {num: 30}], + }, + { + name: 'Bell', + row: 3, + arr: [30, 40], + rec: {key: 'a key', value: 'a value'}, + recs: [{num: 30}, {num: 40}], + }, + ]); + }); +}); From 31cdfffe9a18d142d63a6cda9a8322b382352612 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:35:26 -0400 Subject: [PATCH 292/333] chore(main): release 4.10.0 (#479) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.10.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 8 ++++++++ handwritten/bigquery-storage/package.json | 2 +- ...snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1alpha.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 7 files changed, 14 insertions(+), 6 deletions(-) 
diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 281c7f25e8c..49b1626f86a 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [4.10.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.9.0...v4.10.0) (2024-09-23) + + +### Features + +* Add BigQuery Metastore Partition Service API version v1alpha ([b0cf365](https://github.com/googleapis/nodejs-bigquery-storage/commit/b0cf365d9dcb47ea40e2222aa7fb966396bff4b9)) +* Add wrapper for reading table data using Storage API ([#431](https://github.com/googleapis/nodejs-bigquery-storage/issues/431)) ([03f2b1f](https://github.com/googleapis/nodejs-bigquery-storage/commit/03f2b1f165f25f831f4a266419f3a6168ef2536c)) + ## [4.9.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.8.0...v4.9.0) (2024-09-12) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 6b68129f383..c1ac39de873 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.9.0", + "version": "4.10.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index e10d9a21daa..c87fde38af4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.9.0", + "version": "4.10.0", "language": 
"TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index e1599cc1957..66c5bff5d75 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.9.0", + "version": "4.10.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index c9a4a2c4fbf..c53a47244c4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.9.0", + "version": "4.10.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 1c0fa7caacb..f0ea6f95541 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.9.0", + "version": "4.10.0", "language": "TYPESCRIPT", "apis": [ { 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 1c0fa7caacb..f0ea6f95541 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.9.0", + "version": "4.10.0", "language": "TYPESCRIPT", "apis": [ { From b26f4fadcdd8bcf6f487b6e0fd2b7779ce36b6da Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 23 Oct 2024 11:19:14 -0400 Subject: [PATCH 293/333] fix: arrow parsing for nested and nullable fields (#519) * fix: arrow parsing for nested and nullable fields * fix: address pr comments * docs: add comment --- .../src/reader/arrow_transform.ts | 44 ++-- .../system-test/reader_client_test.ts | 191 +++++++++++++++++- 2 files changed, 216 insertions(+), 19 deletions(-) diff --git a/handwritten/bigquery-storage/src/reader/arrow_transform.ts b/handwritten/bigquery-storage/src/reader/arrow_transform.ts index ca9e8b4447d..01a3bda371b 100644 --- a/handwritten/bigquery-storage/src/reader/arrow_transform.ts +++ b/handwritten/bigquery-storage/src/reader/arrow_transform.ts @@ -17,7 +17,7 @@ import { RecordBatchReader, RecordBatch, RecordBatchStreamReader, - Vector, + DataType, } from 'apache-arrow'; import * as protos from '../../protos/protos'; @@ -140,12 +140,13 @@ export class ArrowRecordBatchTableRowTransform extends Transform { } for (let j = 0; j < batch.numCols; j++) { const column = batch.selectAt([j]); - const columnName = column.schema.fields[0].name; + const field = column.schema.fields[0]; + const columnName = field.name; for (let i = 0; i < batch.numRows; i++) { const fieldData = column.get(i); const 
fieldValue = fieldData?.toJSON()[columnName]; rows[i].f[j] = { - v: convertArrowValue(fieldValue), + v: convertArrowValue(fieldValue, field.type as DataType), }; } } @@ -156,21 +157,36 @@ export class ArrowRecordBatchTableRowTransform extends Transform { } } -function convertArrowValue(fieldValue: any): any { - if (typeof fieldValue === 'object') { - if (fieldValue instanceof Vector) { - const arr = fieldValue.toJSON(); - return arr.map((v: any) => { - return {v: convertArrowValue(v)}; - }); - } - const tableRow: TableRow = {f: []}; +function convertArrowValue(fieldValue: any, type: DataType): any { + if (fieldValue === null) { + return null; + } + if (DataType.isList(type)) { + const arr = fieldValue.toJSON(); + return arr.map((v: any) => { + // Arrays/lists in BigQuery have the same datatype for every element + // so getting the first one is all we need + const elemType = type.children[0].type; + return {v: convertArrowValue(v, elemType)}; + }); + } + if (DataType.isStruct(type)) { + const tableRow: TableRow = {}; Object.keys(fieldValue).forEach(key => { - tableRow.f?.push({ - v: convertArrowValue(fieldValue[key]), + const elemType = type.children.find(f => f.name === key); + if (!tableRow.f) { + tableRow.f = []; + } + tableRow.f.push({ + v: convertArrowValue(fieldValue[key], elemType?.type as DataType), }); }); return tableRow; } + if (DataType.isTimestamp(type)) { + // timestamp comes in microsecond, convert to nanoseconds + // to make it compatible with BigQuery.timestamp. 
+ return fieldValue * 1000; + } return fieldValue; } diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts index 82fd91a73b0..36effd487a5 100644 --- a/handwritten/bigquery-storage/system-test/reader_client_test.ts +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -72,6 +72,33 @@ describe('reader.ReaderClient', () => { type: 'INTEGER', mode: 'REQUIRED', }, + { + name: 'optional', + type: 'STRING', + mode: 'NULLABLE', + }, + { + name: 'list', + type: 'INT64', + mode: 'REPEATED', + }, + { + name: 'metadata', + type: 'RECORD', + mode: 'NULLABLE', + fields: [ + { + name: 'created_at', + type: 'TIMESTAMP', + mode: 'REQUIRED', + }, + { + name: 'updated_at', + type: 'TIMESTAMP', + mode: 'NULLABLE', + }, + ], + }, ], }; @@ -97,9 +124,26 @@ describe('reader.ReaderClient', () => { .dataset(datasetId) .table(tableId) .insert([ - {name: 'Ada Lovelace', row_num: 1}, - {name: 'Alan Turing', row_num: 2}, - {name: 'Bell', row_num: 3}, + { + name: 'Ada Lovelace', + row_num: 1, + optional: 'Some data', + list: [1], + metadata: { + created_at: bigquery.timestamp('2020-04-27T18:07:25.356Z'), + updated_at: bigquery.timestamp('2020-04-27T20:07:25.356Z'), + }, + }, + { + name: 'Alan Turing', + row_num: 2, + optional: 'Some other data', + list: [1, 2], + metadata: { + created_at: bigquery.timestamp('2020-04-27T18:07:25.356Z'), + }, + }, + {name: 'Bell', row_num: 3, list: [1, 2, 3]}, ]); }); @@ -218,7 +262,7 @@ describe('reader.ReaderClient', () => { const table = await tableFromIPC(content); assert.equal(table.numRows, 3); - assert.equal(table.numCols, 2); + assert.equal(table.numCols, 5); reader.close(); } finally { @@ -253,7 +297,7 @@ describe('reader.ReaderClient', () => { const table = new Table(batches); assert.equal(table.numRows, 3); - assert.equal(table.numCols, 2); + assert.equal(table.numCols, 5); reader.close(); } finally { @@ -295,6 +339,143 @@ 
describe('reader.ReaderClient', () => { assert.equal(rows.length, 3); + assert.deepEqual(rows, [ + { + f: [ + { + v: 'Ada Lovelace', + }, + { + v: '1', + }, + { + v: 'Some data', + }, + { + v: [ + { + v: '1', + }, + ], + }, + { + v: { + f: [ + { + v: 1588010845356000, + }, + { + v: 1588018045356000, + }, + ], + }, + }, + ], + }, + { + f: [ + { + v: 'Alan Turing', + }, + { + v: '2', + }, + { + v: 'Some other data', + }, + { + v: [ + { + v: '1', + }, + { + v: '2', + }, + ], + }, + { + v: { + f: [ + { + v: 1588010845356000, + }, + { + v: null, + }, + ], + }, + }, + ], + }, + { + f: [ + { + v: 'Bell', + }, + { + v: '3', + }, + { + v: null, + }, + { + v: [ + { + v: '1', + }, + { + v: '2', + }, + { + v: '3', + }, + ], + }, + { + v: null, + }, + ], + }, + ]); + const mergedRows = BigQuery.mergeSchemaWithRows_(schema, rows, { + wrapIntegers: false, + }); + assert.deepEqual(mergedRows, [ + { + name: 'Ada Lovelace', + row_num: 1, + optional: 'Some data', + list: [1], + metadata: { + created_at: { + value: '2020-04-27T18:07:25.356Z', + }, + updated_at: { + value: '2020-04-27T20:07:25.356Z', + }, + }, + }, + { + name: 'Alan Turing', + row_num: 2, + optional: 'Some other data', + list: [1, 2], + metadata: { + created_at: { + value: '2020-04-27T18:07:25.356Z', + }, + updated_at: null, + }, + }, + { + name: 'Bell', + row_num: 3, + list: [1, 2, 3], + optional: null, + metadata: null, + }, + ]); + reader.close(); } finally { client.close(); From 5c624eb0e36d2944d12d4aab9546affeecc96e89 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 13:40:31 -0400 Subject: [PATCH 294/333] chore: update links in github issue templates (#513) * chore: update links in github issue templates * chore: update links in github issue templates Source-Link: https://github.com/googleapis/synthtool/commit/38fa49fb668c2beb27f598ad3dda2aa46b8a10ed Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:609822e3c09b7a1bd90b99655904609f162cc15acb4704f1edf778284c36f429 * Delete .github/ISSUE_TEMPLATE/bug_report.md * Delete .github/ISSUE_TEMPLATE/feature_request.md * Delete .github/ISSUE_TEMPLATE/question.md --------- Co-authored-by: Owl Bot Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/ISSUE_TEMPLATE/bug_report.md | 38 ------------------- .../.github/ISSUE_TEMPLATE/bug_report.yml | 12 +++--- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 --------- .../ISSUE_TEMPLATE/processs_request.md | 5 +-- .../.github/ISSUE_TEMPLATE/question.md | 12 ------ .../.github/scripts/close-invalid-link.cjs | 5 ++- 7 files changed, 14 insertions(+), 80 deletions(-) delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 460f67f2b60..24943e1161e 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:a5af6af827a9fffba373151e1453b0498da288024cdd16477900dd42857a42e0 -# created: 2024-09-20T20:26:11.126243246Z + digest: sha256:609822e3c09b7a1bd90b99655904609f162cc15acb4704f1edf778284c36f429 +# created: 2024-10-01T19:34:30.797530443Z diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 0ad95022413..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -labels: 'type: bug, priority: p2' ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -1) Is this a client library issue or a product issue? -This is the client library for . We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. - -2) Did someone already solve this? - - Search the issues already opened: https://github.com/googleapis/nodejs-bigquery-storage/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node - - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js - -3) Do you have a support contract? -Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. - -If the support paths suggested above still do not result in a resolution, please provide the following details. 
- -#### Environment details - - - OS: - - Node.js version: - - npm version: - - `@google-cloud/bigquery-storage` version: - -#### Steps to reproduce - - 1. ? - 2. ? - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml index f11f8e0fabd..c5b86023a0f 100644 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml @@ -24,12 +24,12 @@ body: e.js" required: true - label: "Check our Troubleshooting guide: - https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ - es/troubleshooting" + https://github.com/googleapis/google-cloud-node/blob/main/docs/trou\ + bleshooting.md" required: true - label: "Check our FAQ: - https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ - es/faq" + https://github.com/googleapis/google-cloud-node/blob/main/docs/faq.\ + md" required: true - label: "Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ @@ -55,9 +55,9 @@ body: behavior you are experiencing. If the behavior is the same, it means that you are likely experiencing a bug with the API itself. 
In that case, please submit an issue to the API team, either by submitting an - issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers, or by + issue in its issue tracker (https://cloud.google.com/support/docs/issue-trackers), or by submitting an issue in its linked tracker in the .repo-metadata.json - file https://b.corp.google.com/savedsearches/559654 + file https://b.corp.google.com/savedsearches/559654 validations: required: true - type: input diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index b0327dfa02e..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library -labels: 'type: feature request, priority: p3' ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. 
diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md index 9f88fc1f3b7..45682e8f117 100644 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md +++ b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md @@ -1,5 +1,4 @@ --- name: Process Request -about: Submit a process request to the library. Process requests are any requests related to library infrastructure, including CI/CD, publishing, releasing, etc. This issue template should primarily used by internal members. - ---- \ No newline at end of file +about: Submit a process request to the library. Process requests are any requests related to library infrastructure, for example CI/CD, publishing, releasing, broken links. +--- diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index 97323113911..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -name: Question -about: Ask a question -labels: 'type: question, priority: p3' ---- - -Thanks for stopping by to ask us a question! Please make sure to include: -- What you're trying to do -- What code you've already tried -- Any error messages you're getting - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs index ba7d51372ac..d7a3688e755 100644 --- a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs +++ b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs @@ -40,9 +40,12 @@ module.exports = async ({github, context}) => { const isBugTemplate = issue.data.body.includes('Link to the code that reproduces this issue'); if (isBugTemplate) { + console.log(`Issue ${number} is a bug template`) try { - const link = issue.data.body.split('\n')[18].match(/(https?:\/\/g?i?s?t?\.?github.com\/.*)/); + const link = issue.data.body.split('\n')[18].match(/(https?:\/\/(gist\.)?github.com\/.*)/)[0]; + console.log(`Issue ${number} contains this link: ${link}`) const isValidLink = (await fetch(link)).ok; + console.log(`Issue ${number} has a ${isValidLink ? 'valid' : 'invalid'} link`) if (!isValidLink) { await closeIssue(github, owner, repo, number); } From cdbfac3c3401227b8885035744a85415c33a6745 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 15:27:00 -0400 Subject: [PATCH 295/333] chore(main): release 4.10.1 (#521) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 4.10.1 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Alvaro Viebrantz --- handwritten/bigquery-storage/CHANGELOG.md | 7 +++++++ handwritten/bigquery-storage/package.json | 2 +- .../snippet_metadata.google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...pet_metadata_google.cloud.bigquery.storage.v1alpha.json | 2 +- 
...pet_metadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...pet_metadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 7 files changed, 13 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 49b1626f86a..67f527f64a0 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.10.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.10.0...v4.10.1) (2024-10-23) + + +### Bug Fixes + +* Arrow parsing for nested and nullable fields ([#519](https://github.com/googleapis/nodejs-bigquery-storage/issues/519)) ([fc052fb](https://github.com/googleapis/nodejs-bigquery-storage/commit/fc052fb8746f0f87e028f009a6eccc78ff8a1bc4)) + ## [4.10.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.9.0...v4.10.0) (2024-09-23) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c1ac39de873..0dfcc10836a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.10.0", + "version": "4.10.1", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index c87fde38af4..6b5ddc8b555 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.0", + "version": "4.10.1", "language": 
"TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 66c5bff5d75..35de86b38c2 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.0", + "version": "4.10.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index c53a47244c4..b8a33f2af15 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.0", + "version": "4.10.1", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index f0ea6f95541..b6fb4bb39c6 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.0", + "version": "4.10.1", "language": "TYPESCRIPT", "apis": [ 
{ diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index f0ea6f95541..b6fb4bb39c6 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.0", + "version": "4.10.1", "language": "TYPESCRIPT", "apis": [ { From 572ef69cb315e6009ccf82c2b5fd86465456d697 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 8 Jan 2025 17:48:17 -0400 Subject: [PATCH 296/333] fix: required table fields with value expression should be proto optional (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: required table fields with value expresssion should be proto optional * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: add documentation around columns with default value expression --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- handwritten/bigquery-storage/src/adapt/proto.ts | 14 +++++++++++++- .../bigquery-storage/src/adapt/proto_mappings.ts | 13 ++++++++++++- handwritten/bigquery-storage/src/adapt/schema.ts | 8 ++++++++ .../system-test/managed_writer_client_test.ts | 10 ++++++---- handwritten/bigquery-storage/test/adapt/proto.ts | 6 ++++++ 7 files changed, 47 insertions(+), 8 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index c258a01a313..903128bf4dc 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts 
+++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 208e6c163b8..babf418c832 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 5ea786bbd23..69ac09791dd 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -61,6 +61,10 @@ const packedTypes: FieldDescriptorProtoType[] = [ * column name doesn't have any valid characters, we generate a placeholder name using * the field number `field{fieldNumber}`. * + * If a column is required, but has a `defaultValueExpression` set, the resulting + * protobuf field will be optional, so the backend service can fill data with the + * given expression when no value is set. + * * @param schema - a BigQuery Storage TableSchema. * @param scope - scope to namespace protobuf structs. * @returns DescriptorProto @@ -87,6 +91,10 @@ export function convertStorageSchemaToProto2Descriptor( * column name doesn't have any valid characters, we generate a placeholder name using * the field number `field{fieldNumber}`. * + * If a column is required, but has a `defaultValueExpression` set, the resulting + * protobuf field will be optional, so the backend service can fill data with the + * given expression when no value is set. + * * @param schema - a Bigquery TableSchema. 
* @param scope - scope to namespace protobuf structs. * @returns DescriptorProto @@ -319,7 +327,11 @@ function convertTableFieldSchemaToFieldDescriptorProto( if (!type) { throw Error(`table field ${name} missing type`); } - const label = convertModeToLabel(field.mode, useProto3); + const label = convertModeToLabel( + field.mode, + field.defaultValueExpression, + useProto3 + ); let fdp: FieldDescriptorProto; if ( type === TableFieldSchema.Type.STRUCT || diff --git a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts index 28a7f4da870..6d1094c55f8 100644 --- a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts @@ -104,12 +104,23 @@ export const bqModeToFieldLabelMapProto3: Record< export function convertModeToLabel( mode: TableFieldSchema['mode'], + defaultValueExpression: TableFieldSchema['defaultValueExpression'], useProto3: Boolean ): FieldDescriptorProtoLabel | null { if (!mode) { return null; } - return useProto3 + const label = useProto3 ? bqModeToFieldLabelMapProto3[mode] : bqModeToFieldLabelMapProto2[mode]; + if ( + label === FieldDescriptorProto.Label.LABEL_REQUIRED && + defaultValueExpression && + !useProto3 + ) { + // override LABEL_REQUIRED when there is a default value expression + // so the backend can fill the data for the user + return FieldDescriptorProto.Label.LABEL_OPTIONAL; + } + return label; } diff --git a/handwritten/bigquery-storage/src/adapt/schema.ts b/handwritten/bigquery-storage/src/adapt/schema.ts index d2bdf3ea8f2..3a4c565a5e2 100644 --- a/handwritten/bigquery-storage/src/adapt/schema.ts +++ b/handwritten/bigquery-storage/src/adapt/schema.ts @@ -38,6 +38,10 @@ type ITableFieldSchema = { * [Required] The field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a letter or underscore. The maximum length is 300 characters. 
*/ name?: string; + /** + * Optional. A SQL expression to specify the [default value] (https://cloud.google.com/bigquery/docs/default-values) for this field. + */ + defaultValueExpression?: string; /** * [Required] The field data type. Possible values include STRING, BYTES, INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as FLOAT), NUMERIC, BIGNUMERIC, BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME, DATETIME, INTERVAL, RECORD (where RECORD indicates that the field contains a nested schema) or STRUCT (same as RECORD). */ @@ -89,6 +93,10 @@ function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { name: field.name, }; + if (field.defaultValueExpression) { + out.defaultValueExpression = field.defaultValueExpression; + } + if (field.description) { out.description = field.description; } diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 0b5eaae51ed..bdbfa514333 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1025,6 +1025,7 @@ describe('managedwriter.WriterClient', () => { { name: 'id', type: 'STRING', + mode: 'REQUIRED', defaultValueExpression: 'GENERATE_UUID()', }, { @@ -1081,6 +1082,7 @@ describe('managedwriter.WriterClient', () => { // change MVI config writer.setDefaultMissingValueInterpretation('NULL_VALUE'); writer.setMissingValueInterpretations({ + id: 'DEFAULT_VALUE', updated_at: 'DEFAULT_VALUE', }); @@ -1115,23 +1117,23 @@ describe('managedwriter.WriterClient', () => { assert.strictEqual(rows.length, 4); const first = rows[0]; - assert.notEqual(first.id, null); + assert.notEqual(first.id, ''); assert.notEqual(first.created_at, null); assert.equal(first.updated_at, null); const second = rows[1]; - assert.notEqual(second.id, null); + assert.notEqual(second.id, ''); assert.notEqual(second.created_at, 
null); assert.equal(second.updated_at, null); // After change on MVI config const third = rows[2]; - assert.equal(third.id, null); + assert.notEqual(third.id, ''); assert.equal(third.created_at, null); assert.notEqual(third.updated_at, null); const forth = rows[3]; - assert.equal(forth.id, null); + assert.notEqual(forth.id, ''); assert.equal(forth.created_at, null); assert.notEqual(forth.updated_at, null); diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 41ae926e597..bf0260fd8a6 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -51,6 +51,12 @@ describe('Adapt Protos', () => { type: 'BOOL', mode: 'REPEATED', }, + { + name: 'id', + type: 'STRING', + mode: 'REQUIRED', + defaultValueExpression: 'GENERATE_UUID()', + }, ], }; const storageSchema = From 424ef532f4e06e789a9308c0b124d7463eb4d4b4 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Thu, 9 Jan 2025 10:58:26 -0400 Subject: [PATCH 297/333] feat: emit connection event for a StreamConnection (#533) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes https://togithub.com/googleapis/nodejs-bigquery-storage/issues/529 🦕 Supersedes https://togithub.com/googleapis/nodejs-bigquery-storage/pull/530 --- .../bigquery-storage/src/managedwriter/stream_connection.ts | 5 +++++ .../system-test/managed_writer_client_test.ts | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index da312f27828..6d0e5de0c3a 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -107,12 +107,15 @@ export class StreamConnection extends EventEmitter { }); this._connection.on('pause', () => { this.trace('connection 
paused'); + this.emit('pause'); }); this._connection.on('resume', () => { this.trace('connection resumed'); + this.emit('resume'); }); this._connection.on('end', () => { this.trace('connection ended'); + this.emit('end'); }); } @@ -364,6 +367,7 @@ export class StreamConnection extends EventEmitter { ); this.close(); this.open(); + this.emit('reconnect'); } /** @@ -375,6 +379,7 @@ export class StreamConnection extends EventEmitter { } this._connection.end(); this._connection.removeAllListeners(); + this.emit('close'); this._connection.destroy(); this._connection = null; } diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index bdbfa514333..ff3415a1566 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1655,7 +1655,7 @@ describe('managedwriter.WriterClient', () => { }); let reconnectedCalled = false; - sandbox.stub(connection, 'reconnect').callsFake(() => { + connection.on('reconnect', () => { reconnectedCalled = true; }); From 6bc7f72712da8de5f82863d54ec4f860f4c20f3b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 12:06:04 -0400 Subject: [PATCH 298/333] chore(main): release 4.11.0 (#534) --- handwritten/bigquery-storage/CHANGELOG.md | 12 ++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...et_metadata.google.cloud.bigquery.storage.v1.json | 2 +- ...et_metadata_google.cloud.bigquery.storage.v1.json | 2 +- ...tadata_google.cloud.bigquery.storage.v1alpha.json | 2 +- ...tadata.google.cloud.bigquery.storage.v1beta1.json | 2 +- ...tadata_google.cloud.bigquery.storage.v1beta1.json | 2 +- 7 files changed, 18 insertions(+), 6 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 
67f527f64a0..d3328ad146c 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [4.11.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.10.1...v4.11.0) (2025-01-09) + + +### Features + +* Emit connection event for a StreamConnection ([#533](https://github.com/googleapis/nodejs-bigquery-storage/issues/533)) ([6688ea3](https://github.com/googleapis/nodejs-bigquery-storage/commit/6688ea3e87e7bd992802d643daa1f760c0c4dd92)) + + +### Bug Fixes + +* Required table fields with value expression should be proto optional ([#532](https://github.com/googleapis/nodejs-bigquery-storage/issues/532)) ([f125792](https://github.com/googleapis/nodejs-bigquery-storage/commit/f1257924be9bb257ea9a11448322f8aa711ad47e)) + ## [4.10.1](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.10.0...v4.10.1) (2024-10-23) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 0dfcc10836a..b98e5cca22f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.10.1", + "version": "4.11.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 6b5ddc8b555..68730059e5b 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.1", + "version": "4.11.0", "language": 
"TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 35de86b38c2..57591f2185a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.1", + "version": "4.11.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index b8a33f2af15..1136e6c3321 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.1", + "version": "4.11.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index b6fb4bb39c6..93417409058 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.1", + "version": "4.11.0", "language": "TYPESCRIPT", "apis": [ 
{ diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index b6fb4bb39c6..93417409058 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.10.1", + "version": "4.11.0", "language": "TYPESCRIPT", "apis": [ { From 6e463288c3f39a6fe0a3105a4c54fd33d3243846 Mon Sep 17 00:00:00 2001 From: "Leah E. Cole" <6719667+leahecole@users.noreply.github.com> Date: Fri, 4 Apr 2025 10:28:23 -0400 Subject: [PATCH 299/333] chore!: upgrade to node 18 (#542) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add experimental ArrowData type and arrow_data field within AppendRowsRequest PiperOrigin-RevId: 684900949 Source-Link: https://github.com/googleapis/googleapis/commit/b49a983820806fc0d903370c0e75129fe7ae3c7b Source-Link: https://github.com/googleapis/googleapis-gen/commit/34ddd0399386aeb1d4ab5d397a6dcce5908a16f0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzRkZGQwMzk5Mzg2YWViMWQ0YWI1ZDM5N2E2ZGNjZTU5MDhhMTZmMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove redundant gRPC service config file feat: specify retry configuration PiperOrigin-RevId: 712493958 Source-Link: https://github.com/googleapis/googleapis/commit/29aea4190aba664659908ff5e381c830e4752502 Source-Link: https://github.com/googleapis/googleapis-gen/commit/04bcbab4505989e984b1403d438fffc0312144af Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDRiY2JhYjQ1MDU5ODllOTg0YjE0MDNkNDM4ZmZmYzAzMTIxNDRhZiJ9 * 🦉 
Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: paging changes for bigquery fix: add x-goog-request params to headers for LRO-polling methods fix: remove extra protos in ESM & capture ESM in headers docs: update comments for a Nodejs stream object PiperOrigin-RevId: 721038181 Source-Link: https://github.com/googleapis/googleapis/commit/331a41aa9b3a0631abe4e1eb5dae67ab9c8bb043 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e81b4c5b5208895f663a24048615a59d5636f415 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTgxYjRjNWI1MjA4ODk1ZjY2M2EyNDA0ODYxNWE1OWQ1NjM2ZjQxNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update copyright year for auto-generated protos PiperOrigin-RevId: 731694023 Source-Link: https://github.com/googleapis/googleapis/commit/12fc2f929e39688aa24ae6fc5e18db3315975a6e Source-Link: https://github.com/googleapis/googleapis-gen/commit/ec9b0414aac5ce611ff57655a4da655bddfff0fe Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZWM5YjA0MTRhYWM1Y2U2MTFmZjU3NjU1YTRkYTY1NWJkZGZmZjBmZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update copyright year for auto-generated protos PiperOrigin-RevId: 731731294 Source-Link: https://github.com/googleapis/googleapis/commit/a2dbd2daf634a4841d98ce2ef77aea8e673e71e5 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c28cf034e138ea0987062a338500211b7b4f4362 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzI4Y2YwMzRlMTM4ZWEwOTg3MDYyYTMzODUwMDIxMWI3YjRmNDM2MiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update copyright year for auto-generated protos PiperOrigin-RevId: 732130682 Source-Link: 
https://github.com/googleapis/googleapis/commit/9415ba048aa587b1b2df2b96fc00aa009c831597 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2905f833756c2b20b3282be84b511e040fe54f33 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjkwNWY4MzM3NTZjMmIyMGIzMjgyYmU4NGI1MTFlMDQwZmU1NGYzMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update typescript gapic generator to 4.8.1 feat: add request/response debug logging to gapics, update templates to gax 5 and node 18 (#1671) fix: add json files to tsconfig templates (#1692) (ba6be1d) PiperOrigin-RevId: 735896588 Source-Link: https://github.com/googleapis/googleapis/commit/3419af786b385118d2dacfd8d99b141a00e9944d Source-Link: https://github.com/googleapis/googleapis-gen/commit/f35ba1142f4e168222327d892b5f6ee908e5d461 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjM1YmExMTQyZjRlMTY4MjIyMzI3ZDg5MmI1ZjZlZTkwOGU1ZDQ2MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: await/catch promises, and update listOperationsAsync return type PiperOrigin-RevId: 738212310 Source-Link: https://github.com/googleapis/googleapis/commit/803b23488149d2d00725edc52d8a6498b6ab0fec Source-Link: https://github.com/googleapis/googleapis-gen/commit/4f44bd2baa8dd5a71ca0cebdb164c3c34341ed87 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGY0NGJkMmJhYThkZDVhNzFjYTBjZWJkYjE2NGMzYzM0MzQxZWQ4NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * run migrate script * resolve compile errors * npm run fix * fix floating promise lint error * add proto compile changes * recreate sofia changes * lint fixes * update sha * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * upgrade 
pack-n-play * update sha * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 5 +- .../.github/PULL_REQUEST_TEMPLATE.md | 33 +- .../.github/release-trigger.yml | 1 + .../.github/scripts/close-invalid-link.cjs | 41 +- .../.github/scripts/close-unresponsive.cjs | 108 ++--- .../scripts/fixtures/invalidIssueBody.txt | 50 +++ .../scripts/fixtures/validIssueBody.txt | 50 +++ .../validIssueBodyDifferentLinkLocation.txt | 50 +++ .../.github/scripts/package.json | 21 + .../.github/scripts/remove-response-label.cjs | 28 +- .../scripts/tests/close-invalid-link.test.cjs | 86 ++++ .../close-or-remove-response-label.test.cjs | 109 +++++ .../.github/sync-repo-settings.yaml | 4 +- handwritten/bigquery-storage/.jsdoc.js | 4 +- .../bigquery-storage/.kokoro/common.cfg | 2 +- .../continuous/{node14 => node18}/common.cfg | 2 +- .../continuous/{node14 => node18}/lint.cfg | 0 .../{node14 => node18}/samples-test.cfg | 0 .../{node14 => node18}/system-test.cfg | 0 .../continuous/{node14 => node18}/test.cfg | 0 .../presubmit/{node14 => node18}/common.cfg | 2 +- .../{node14 => node18}/samples-test.cfg | 0 .../{node14 => node18}/system-test.cfg | 0 .../presubmit/{node14 => node18}/test.cfg | 0 .../.kokoro/release/docs-devsite.cfg | 2 +- .../bigquery-storage/.kokoro/release/docs.cfg | 2 +- .../bigquery-storage/.kokoro/release/docs.sh | 2 +- .../.kokoro/release/publish.cfg | 2 +- .../bigquery-storage/.kokoro/samples-test.sh | 6 +- .../bigquery-storage/.kokoro/system-test.sh | 2 +- handwritten/bigquery-storage/.kokoro/test.bat | 2 +- handwritten/bigquery-storage/.kokoro/test.sh | 2 +- .../bigquery-storage/.kokoro/trampoline_v2.sh | 2 +- handwritten/bigquery-storage/README.md | 2 +- handwritten/bigquery-storage/package.json | 56 +-- .../cloud/bigquery/storage/v1/arrow.proto | 2 +- .../cloud/bigquery/storage/v1/avro.proto | 2 +- 
.../cloud/bigquery/storage/v1/protobuf.proto | 2 +- .../cloud/bigquery/storage/v1/storage.proto | 17 +- .../cloud/bigquery/storage/v1/stream.proto | 2 +- .../cloud/bigquery/storage/v1/table.proto | 2 +- .../storage/v1alpha/metastore_partition.proto | 2 +- .../bigquery/storage/v1alpha/partition.proto | 2 +- .../bigquery/storage/v1beta1/arrow.proto | 2 +- .../cloud/bigquery/storage/v1beta1/avro.proto | 2 +- .../storage/v1beta1/read_options.proto | 2 +- .../bigquery/storage/v1beta1/storage.proto | 2 +- .../storage/v1beta1/table_reference.proto | 2 +- .../bigquery-storage/protos/protos.d.ts | 147 ++++++- handwritten/bigquery-storage/protos/protos.js | 276 +++++++++++- .../bigquery-storage/protos/protos.json | 19 +- .../v1/big_query_read.create_read_session.js | 2 +- .../generated/v1/big_query_read.read_rows.js | 2 +- .../v1/big_query_read.split_read_stream.js | 2 +- .../v1/big_query_write.append_rows.js | 7 +- ..._query_write.batch_commit_write_streams.js | 2 +- .../v1/big_query_write.create_write_stream.js | 2 +- .../big_query_write.finalize_write_stream.js | 2 +- .../v1/big_query_write.flush_rows.js | 2 +- .../v1/big_query_write.get_write_stream.js | 2 +- ...data_google.cloud.bigquery.storage.v1.json | 6 +- ...rvice.batch_create_metastore_partitions.js | 2 +- ...rvice.batch_delete_metastore_partitions.js | 2 +- ...rvice.batch_update_metastore_partitions.js | 2 +- ...ition_service.list_metastore_partitions.js | 2 +- ...ion_service.stream_metastore_partitions.js | 2 +- ...orage.batch_create_read_session_streams.js | 2 +- .../big_query_storage.create_read_session.js | 2 +- .../big_query_storage.finalize_stream.js | 2 +- .../v1beta1/big_query_storage.read_rows.js | 2 +- .../big_query_storage.split_read_stream.js | 2 +- .../bigquery-storage/src/adapt/proto.ts | 24 +- .../src/adapt/proto_mappings.ts | 2 +- .../bigquery-storage/src/adapt/schema.ts | 8 +- .../src/adapt/schema_mappings.ts | 2 +- .../src/managedwriter/encoder.ts | 4 +- .../src/managedwriter/error.ts | 2 +- 
.../src/managedwriter/json_writer.ts | 10 +- .../src/managedwriter/stream_connection.ts | 20 +- .../src/managedwriter/stream_types.ts | 2 +- .../src/managedwriter/writer.ts | 6 +- .../src/managedwriter/writer_client.ts | 20 +- .../src/reader/arrow_reader.ts | 6 +- .../src/reader/arrow_transform.ts | 8 +- .../src/reader/read_client.ts | 6 +- .../src/reader/read_session.ts | 12 +- .../src/reader/read_stream.ts | 6 +- .../src/reader/table_reader.ts | 6 +- .../bigquery-storage/src/util/logger.ts | 2 +- .../src/v1/big_query_read_client.ts | 143 +++++-- .../src/v1/big_query_write_client.ts | 271 +++++++++--- handwritten/bigquery-storage/src/v1/index.ts | 2 +- .../bigquery-storage/src/v1alpha/index.ts | 2 +- .../metastore_partition_service_client.ts | 253 ++++++++--- .../src/v1beta1/big_query_storage_client.ts | 225 +++++++--- .../bigquery-storage/src/v1beta1/index.ts | 2 +- .../system-test/fixtures/sample/src/index.js | 2 +- .../system-test/fixtures/sample/src/index.ts | 2 +- .../bigquery-storage/system-test/install.ts | 7 +- .../system-test/managed_writer_client_test.ts | 131 ++++-- .../system-test/reader_client_test.ts | 54 ++- .../bigquery-storage/test/adapt/proto.ts | 12 +- .../test/gapic_big_query_read_v1.ts | 290 +++++++------ .../test/gapic_big_query_storage_v1beta1.ts | 320 +++++++------- .../test/gapic_big_query_write_v1.ts | 356 ++++++++-------- ...pic_metastore_partition_service_v1alpha.ts | 392 ++++++++++-------- handwritten/bigquery-storage/tsconfig.json | 7 +- 107 files changed, 2688 insertions(+), 1137 deletions(-) create mode 100644 handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt create mode 100644 handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt create mode 100644 handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt create mode 100644 handwritten/bigquery-storage/.github/scripts/package.json create mode 100644 
handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs create mode 100644 handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs rename handwritten/bigquery-storage/.kokoro/continuous/{node14 => node18}/common.cfg (89%) rename handwritten/bigquery-storage/.kokoro/continuous/{node14 => node18}/lint.cfg (100%) rename handwritten/bigquery-storage/.kokoro/continuous/{node14 => node18}/samples-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/continuous/{node14 => node18}/system-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/continuous/{node14 => node18}/test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node14 => node18}/common.cfg (89%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node14 => node18}/samples-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node14 => node18}/system-test.cfg (100%) rename handwritten/bigquery-storage/.kokoro/presubmit/{node14 => node18}/test.cfg (100%) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 24943e1161e..d21b9dd5db6 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:609822e3c09b7a1bd90b99655904609f162cc15acb4704f1edf778284c36f429 -# created: 2024-10-01T19:34:30.797530443Z + digest: sha256:7c7ecb30ffac77ebdacd15f4b1c6c888f01c212832b9efd73fbf4bfc1284b7d4 diff --git a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md index 19153139702..15ce116d1fe 100644 --- a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md +++ b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md @@ -1,7 +1,30 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +> Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: + +## Description + +> Please provide a detailed description for the change. +> As much as possible, please try to keep changes separate by purpose. For example, try not to make a one-line bug fix in a feature request, or add an irrelevant README change to a bug fix. + +## Impact + +> What's the impact of this change? + +## Testing + +> Have you added unit and integration tests if necessary? +> Were any tests changed? Are any breaking changes necessary? + +## Additional Information + +> Any additional details that we should be aware of? + +## Checklist + +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) +- [ ] Code coverage does not decrease +- [ ] Appropriate docs were updated +- [ ] Appropriate comments were added, particularly in complex areas or places that require background +- [ ] No new warnings or issues will be generated from this change -Fixes # 🦕 +Fixes #issue_number_goes_here 🦕 diff --git a/handwritten/bigquery-storage/.github/release-trigger.yml b/handwritten/bigquery-storage/.github/release-trigger.yml index d4ca94189e1..521eddda376 100644 --- a/handwritten/bigquery-storage/.github/release-trigger.yml +++ b/handwritten/bigquery-storage/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: nodejs-bigquery-storage \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs index d7a3688e755..fdb51488197 100644 --- a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs +++ b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs @@ -12,21 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. +const fs = require('fs'); +const yaml = require('js-yaml'); +const path = require('path'); +const TEMPLATE_FILE_PATH = path.resolve(__dirname, '../ISSUE_TEMPLATE/bug_report.yml') + async function closeIssue(github, owner, repo, number) { await github.rest.issues.createComment({ owner: owner, repo: repo, issue_number: number, - body: 'Issue was opened with an invalid reproduction link. 
Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)' + body: "Issue was opened with an invalid reproduction link. Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)" }); await github.rest.issues.update({ owner: owner, repo: repo, issue_number: number, - state: 'closed' + state: "closed" }); } -module.exports = async ({github, context}) => { +module.exports = async ({ github, context }) => { const owner = context.repo.owner; const repo = context.repo.repo; const number = context.issue.number; @@ -37,20 +42,32 @@ module.exports = async ({github, context}) => { issue_number: number, }); - const isBugTemplate = issue.data.body.includes('Link to the code that reproduces this issue'); + const yamlData = fs.readFileSync(TEMPLATE_FILE_PATH, 'utf8'); + const obj = yaml.load(yamlData); + const linkMatchingText = (obj.body.find(x => {return x.type === 'input' && x.validations.required === true && x.attributes.label.includes('link')})).attributes.label; + const isBugTemplate = issue.data.body.includes(linkMatchingText); if (isBugTemplate) { console.log(`Issue ${number} is a bug template`) try { - const link = issue.data.body.split('\n')[18].match(/(https?:\/\/(gist\.)?github.com\/.*)/)[0]; - console.log(`Issue ${number} contains this link: ${link}`) - const isValidLink = (await fetch(link)).ok; - console.log(`Issue ${number} has a ${isValidLink ? 
'valid' : 'invalid'} link`) - if (!isValidLink) { - await closeIssue(github, owner, repo, number); - } + const text = issue.data.body; + const match = text.indexOf(linkMatchingText); + if (match !== -1) { + const nextLineIndex = text.indexOf('http', match); + if (nextLineIndex == -1) { + await closeIssue(github, owner, repo, number); + return; + } + const link = text.substring(nextLineIndex, text.indexOf('\n', nextLineIndex)); + console.log(`Issue ${number} contains this link: ${link}`); + const isValidLink = (await fetch(link)).ok; + console.log(`Issue ${number} has a ${isValidLink ? "valid" : "invalid"} link`) + if (!isValidLink) { + await closeIssue(github, owner, repo, number); + } + } } catch (err) { await closeIssue(github, owner, repo, number); } } -}; +}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs b/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs index 142dc1265a4..6f81b508fa5 100644 --- a/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs +++ b/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +/// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -13,57 +13,57 @@ // limitations under the License. function labeledEvent(data) { - return data.event === 'labeled' && data.label.name === 'needs more info'; - } - - const numberOfDaysLimit = 15; - const close_message = `This has been closed since a request for information has \ - not been answered for ${numberOfDaysLimit} days. 
It can be reopened when the \ - requested information is provided.`; - - module.exports = async ({github, context}) => { - const owner = context.repo.owner; - const repo = context.repo.repo; - - const issues = await github.rest.issues.listForRepo({ - owner: owner, - repo: repo, - labels: 'needs more info', - }); - const numbers = issues.data.map((e) => e.number); - - for (const number of numbers) { - const events = await github.paginate( - github.rest.issues.listEventsForTimeline, - { - owner: owner, - repo: repo, - issue_number: number, - }, - (response) => response.data.filter(labeledEvent) - ); - - const latest_response_label = events[events.length - 1]; - - const created_at = new Date(latest_response_label.created_at); - const now = new Date(); - const diff = now - created_at; - const diffDays = diff / (1000 * 60 * 60 * 24); - - if (diffDays > numberOfDaysLimit) { - await github.rest.issues.update({ - owner: owner, - repo: repo, - issue_number: number, - state: 'closed', - }); - - await github.rest.issues.createComment({ - owner: owner, - repo: repo, - issue_number: number, - body: close_message, - }); - } + return data.event === "labeled" && data.label.name === "needs more info"; +} + +const numberOfDaysLimit = 15; +const close_message = `This has been closed since a request for information has \ +not been answered for ${numberOfDaysLimit} days. 
It can be reopened when the \ +requested information is provided.`; + +module.exports = async ({ github, context }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + + const issues = await github.rest.issues.listForRepo({ + owner: owner, + repo: repo, + labels: "needs more info", + }); + const numbers = issues.data.map((e) => e.number); + + for (const number of numbers) { + const events = await github.paginate( + github.rest.issues.listEventsForTimeline, + { + owner: owner, + repo: repo, + issue_number: number, + }, + (response) => response.data.filter(labeledEvent) + ); + + const latest_response_label = events[events.length - 1]; + + const created_at = new Date(latest_response_label.created_at); + const now = new Date(); + const diff = now - created_at; + const diffDays = diff / (1000 * 60 * 60 * 24); + + if (diffDays > numberOfDaysLimit) { + await github.rest.issues.update({ + owner: owner, + repo: repo, + issue_number: number, + state: "closed", + }); + + await github.rest.issues.createComment({ + owner: owner, + repo: repo, + issue_number: number, + body: close_message, + }); } - }; + } +}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt new file mode 100644 index 00000000000..504bd669022 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. 
+ +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. + +not-a-link + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt new file mode 100644 index 00000000000..6e0ace338eb --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. 
+ +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. + +https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. 
\ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt new file mode 100644 index 00000000000..984a420e376 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. + +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + +### Link to the code that reproduces this issue. A link to a **public** Github Repository with a minimal reproduction. + + +https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. 
\ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/package.json b/handwritten/bigquery-storage/.github/scripts/package.json new file mode 100644 index 00000000000..2c2e5207df9 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/package.json @@ -0,0 +1,21 @@ +{ + "name": "tests", + "private": true, + "description": "tests for script", + "scripts": { + "test": "mocha tests/close-invalid-link.test.cjs && mocha tests/close-or-remove-response-label.test.cjs" + }, + "author": "Google Inc.", + "license": "Apache-2.0", + "engines": { + "node": ">=18" + }, + "dependencies": { + "js-yaml": "^4.1.0" + }, + "devDependencies": { + "@octokit/rest": "^19.0.0", + "mocha": "^10.0.0", + "sinon": "^18.0.0" + } +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs b/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs index 887cf349e9d..4a784ddf7a5 100644 --- a/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs +++ b/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs @@ -13,21 +13,21 @@ // limitations under the License. 
module.exports = async ({ github, context }) => { - const commenter = context.actor; - const issue = await github.rest.issues.get({ + const commenter = context.actor; + const issue = await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const author = issue.data.user.login; + const labels = issue.data.labels.map((e) => e.name); + + if (author === commenter && labels.includes("needs more info")) { + await github.rest.issues.removeLabel({ owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number, + name: "needs more info", }); - const author = issue.data.user.login; - const labels = issue.data.labels.map((e) => e.name); - - if (author === commenter && labels.includes('needs more info')) { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - name: 'needs more info', - }); - } - }; + } +}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs b/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs new file mode 100644 index 00000000000..f63ee89c811 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs @@ -0,0 +1,86 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const { describe, it } = require('mocha'); +const closeInvalidLink = require('../close-invalid-link.cjs'); +const fs = require('fs'); +const sinon = require('sinon'); + +describe('close issues with invalid links', () => { + let octokitStub; + let issuesStub; + + beforeEach(() => { + issuesStub = { + get: sinon.stub(), + createComment: sinon.stub(), + update: sinon.stub(), + }; + octokitStub = { + rest: { + issues: issuesStub, + }, + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('does not do anything if it is not a bug', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: "I'm having a problem with this." } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + it('does not do anything if it is a bug with an appropriate link', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBody.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + it('does not do anything if it is a bug with an appropriate link and the template changes', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBodyDifferentLinkLocation.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + 
it('closes the issue if the link is invalid', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/invalidIssueBody.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.calledOnce(issuesStub.createComment); + sinon.assert.calledOnce(issuesStub.update); + }); +}); \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs b/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs new file mode 100644 index 00000000000..fb092c53619 --- /dev/null +++ b/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs @@ -0,0 +1,109 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const { describe, it, beforeEach, afterEach } = require('mocha'); +const removeResponseLabel = require('../remove-response-label.cjs'); +const closeUnresponsive = require('../close-unresponsive.cjs'); +const sinon = require('sinon'); + +function getISODateDaysAgo(days) { + const today = new Date(); + const daysAgo = new Date(today.setDate(today.getDate() - days)); + return daysAgo.toISOString(); +} + +describe('close issues or remove needs more info labels', () => { + let octokitStub; + let issuesStub; + let paginateStub; + + beforeEach(() => { + issuesStub = { + listForRepo: sinon.stub(), + update: sinon.stub(), + createComment: sinon.stub(), + get: sinon.stub(), + removeLabel: sinon.stub(), + }; + paginateStub = sinon.stub(); + octokitStub = { + rest: { + issues: issuesStub, + }, + paginate: paginateStub, + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('closes the issue if the OP has not responded within the allotted time and there is a needs-more-info label', async () => { + const context = { owner: 'testOrg', repo: 'testRepo' }; + const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; + const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(16) }]; + + issuesStub.listForRepo.resolves({ data: issuesInRepo }); + paginateStub.resolves(eventsInIssue); + + await closeUnresponsive({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.listForRepo); + sinon.assert.calledOnce(paginateStub); + sinon.assert.calledOnce(issuesStub.update); + sinon.assert.calledOnce(issuesStub.createComment); + }); + + it('does nothing if not enough time has passed and there is a needs-more-info label', async () => { + const context = { owner: 'testOrg', repo: 'testRepo' }; + const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; + const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, 
created_at: getISODateDaysAgo(14) }]; + + issuesStub.listForRepo.resolves({ data: issuesInRepo }); + paginateStub.resolves(eventsInIssue); + + await closeUnresponsive({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.listForRepo); + sinon.assert.calledOnce(paginateStub); + sinon.assert.notCalled(issuesStub.update); + sinon.assert.notCalled(issuesStub.createComment); + }); + + it('removes the label if OP responded', async () => { + const context = { actor: 'OP', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; + + issuesStub.get.resolves({ data: issueContext }); + + await removeResponseLabel({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.calledOnce(issuesStub.removeLabel); + }); + + it('does not remove the label if author responded', async () => { + const context = { actor: 'repo-maintainer', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; + + issuesStub.get.resolves({ data: issueContext }); + + await removeResponseLabel({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.removeLabel); + }); +}); \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml index b46e4c4d61d..a013376d1cb 100644 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml @@ -8,9 +8,9 @@ branchProtectionRules: - "ci/kokoro: Samples test" - "ci/kokoro: System test" - lint - - test (14) - - test (16) - test (18) + - test (20) + - test (22) - cla/google - windows - OwlBot Post Processor diff --git a/handwritten/bigquery-storage/.jsdoc.js 
b/handwritten/bigquery-storage/.jsdoc.js index e5beb93de77..a278e26223f 100644 --- a/handwritten/bigquery-storage/.jsdoc.js +++ b/handwritten/bigquery-storage/.jsdoc.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -40,7 +40,7 @@ module.exports = { includePattern: '\\.js$' }, templates: { - copyright: 'Copyright 2024 Google LLC', + copyright: 'Copyright 2025 Google LLC', includeDate: false, sourceFiles: false, systemName: '@google-cloud/bigquery-storage', diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg index 2c6bf4338e5..2339f0880ee 100644 --- a/handwritten/bigquery-storage/.kokoro/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg similarity index 89% rename from handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg index 2c6bf4338e5..2339f0880ee 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node14/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node14/lint.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node14/samples-test.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node14/system-test.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node14/test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/continuous/node14/test.cfg rename to handwritten/bigquery-storage/.kokoro/continuous/node18/test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg similarity index 89% rename from handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg index 2c6bf4338e5..2339f0880ee 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node14/common.cfg +++ 
b/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/presubmit/node14/samples-test.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/presubmit/node14/system-test.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node14/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/test.cfg similarity index 100% rename from handwritten/bigquery-storage/.kokoro/presubmit/node14/test.cfg rename to handwritten/bigquery-storage/.kokoro/presubmit/node18/test.cfg diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg index 8bcc62cc814..8c834f1f2a1 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } # Download trampoline resources. 
diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg index 03ff6056a52..4dc3fc24d2b 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } # Download trampoline resources. diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.sh b/handwritten/bigquery-storage/.kokoro/release/docs.sh index 1d8f3f490a5..e9079a60530 100755 --- a/handwritten/bigquery-storage/.kokoro/release/docs.sh +++ b/handwritten/bigquery-storage/.kokoro/release/docs.sh @@ -16,7 +16,7 @@ set -eo pipefail -# build jsdocs (Python is installed on the Node 10 docker image). +# build jsdocs (Python is installed on the Node 18 docker image). if [[ -z "$CREDENTIALS" ]]; then # if CREDENTIALS are explicitly set, assume we're testing locally # and don't set NPM_CONFIG_PREFIX. diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 93a76e89119..1ab8d3f84b5 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -30,7 +30,7 @@ build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/samples-test.sh b/handwritten/bigquery-storage/.kokoro/samples-test.sh index 8c5d108cb58..528775394e0 100755 --- a/handwritten/bigquery-storage/.kokoro/samples-test.sh +++ b/handwritten/bigquery-storage/.kokoro/samples-test.sh @@ -16,7 +16,9 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=${HOME}/.npm-global +# Ensure the npm global directory is writable, otherwise rebuild `npm` +mkdir -p $NPM_CONFIG_PREFIX +npm config -g ls || npm i -g npm@`npm --version` # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account @@ -56,7 +58,7 @@ fi # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/system-test.sh b/handwritten/bigquery-storage/.kokoro/system-test.sh index 0b3043d268c..a90d5cfec89 100755 --- a/handwritten/bigquery-storage/.kokoro/system-test.sh +++ b/handwritten/bigquery-storage/.kokoro/system-test.sh @@ -49,7 +49,7 @@ npm run system-test # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/test.bat b/handwritten/bigquery-storage/.kokoro/test.bat index 0bb12405231..caf825656c2 100644 --- a/handwritten/bigquery-storage/.kokoro/test.bat +++ b/handwritten/bigquery-storage/.kokoro/test.bat @@ -21,7 +21,7 @@ cd .. @rem we upgrade Node.js in the image: SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm -call nvm use v14.17.3 +call nvm use 18 call which node call npm install || goto :error diff --git a/handwritten/bigquery-storage/.kokoro/test.sh b/handwritten/bigquery-storage/.kokoro/test.sh index 862d478d324..0d9f6392a75 100755 --- a/handwritten/bigquery-storage/.kokoro/test.sh +++ b/handwritten/bigquery-storage/.kokoro/test.sh @@ -39,7 +39,7 @@ npm test # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh index 4d03112128a..5d6cfcca528 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh @@ -44,7 +44,7 @@ # the project root. # # Here is an example for running this script. 
-# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ +# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:18-user \ # TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ # .kokoro/trampoline_v2.sh diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 5b2e00f6acb..97782baa3c9 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -257,7 +257,7 @@ async function bigqueryStorageQuickstart() { do { const decodedData = avroType.decode( data.avroRows.serializedBinaryRows, - pos + pos, ); if (decodedData.value) { diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index b98e5cca22f..8068c8f8abc 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -27,43 +27,43 @@ "precompile": "gts clean" }, "dependencies": { - "@google-cloud/paginator": "^5.0.0", - "apache-arrow": "^14.0.2", - "core-js": "^3.37.1", + "@google-cloud/paginator": "^6.0.0", + "apache-arrow": "^19.0.1", + "core-js": "^3.41.0", "extend": "^3.0.2", - "google-auth-library": "^9.6.3", - "google-gax": "^4.3.1" + "google-auth-library": "^10.0.0-rc.1", + "google-gax": "^5.0.1-rc.0" }, "peerDependencies": { "protobufjs": "^7.2.4" }, "devDependencies": { - "@google-cloud/bigquery": "^7.5.2", + "@google-cloud/bigquery": "^7.9.3", "@types/extend": "^3.0.4", - "@types/mocha": "^9.0.0", - "@types/node": "^20.16.5", - "@types/sinon": "^17.0.0", - "@types/uuid": "^9.0.1", - "c8": "^9.0.0", - "gapic-tools": "^0.4.0", - "gts": "^5.0.0", - "jsdoc": "^4.0.0", + "@types/mocha": "^10.0.10", + "@types/node": "^22.13.14", + "@types/sinon": "^17.0.4", + "@types/uuid": "^10.0.0", + "c8": "^10.1.3", + "gapic-tools": "^1.0.1", + "gts": "^6.0.2", + "jsdoc": "^4.0.4", "jsdoc-fresh": "^3.0.0", "jsdoc-region-tag": "^3.0.0", - "linkinator": "^3.0.0", - "mocha": "^9.2.2", - "null-loader": "^4.0.0", - "pack-n-play": "^2.0.0", - 
"sinon": "^18.0.0", - "nise": "6.0.0", - "path-to-regexp": "6.3.0", - "ts-loader": "^9.0.0", - "typescript": "^5.5.3", - "uuid": "^9.0.0", - "webpack": "^5.0.0", - "webpack-cli": "^5.0.0" + "linkinator": "^6.1.2", + "mocha": "^11.1.0", + "nise": "^6.1.1", + "null-loader": "^4.0.1", + "pack-n-play": "^3.0.1", + "path-to-regexp": "^8.2.0", + "sinon": "^20.0.0", + "ts-loader": "^9.5.2", + "typescript": "^5.8.2", + "uuid": "^11.1.0", + "webpack": "^5.98.0", + "webpack-cli": "^6.0.1" }, "engines": { - "node": ">=14.0.0" + "node": ">=18" } -} +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index 530d4179d1b..f4f17c3cdf5 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index b104a90638b..ddf7c15ae21 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto index 99a9c77492d..2713e057839 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/protobuf.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index d83dacc465c..c9dc3f3d460 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -422,6 +422,17 @@ message CreateWriteStreamRequest { // The size of a single AppendRowsRequest must be less than 10 MB in size. // Requests larger than this return an error, typically `INVALID_ARGUMENT`. message AppendRowsRequest { + // Arrow schema and data. + // Arrow format is an experimental feature only selected for allowlisted + // customers. + message ArrowData { + // Optional. Arrow Schema used to serialize the data. + ArrowSchema writer_schema = 1; + + // Required. Serialized row data in Arrow format. + ArrowRecordBatch rows = 2; + } + // ProtoData contains the data rows and schema when constructing append // requests. message ProtoData { @@ -510,6 +521,10 @@ message AppendRowsRequest { oneof rows { // Rows in proto format. 
ProtoData proto_rows = 4; + + // Rows in arrow format. This is an experimental feature only selected for + // allowlisted customers. + ArrowData arrow_rows = 5; } // Id set by client to annotate its identity. Only initial request setting is diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 511eb4047ed..2e52a0732b1 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index 5438fd3f62a..eb75d706725 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto index d34ceed42b0..0c57403e28c 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto index d00b23d52ea..7e9c332db6f 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/partition.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto index b7decf20c4a..378975cf079 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/arrow.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto index 96e268f2f2c..ccb76f2db7e 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/avro.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto index 3f863e71c2b..0fe7d2b3049 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/read_options.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto index 67422bee643..5cd150e38dd 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/storage.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto index 1c194a8066c..99cd5d099c5 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta1/table_reference.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 903128bf4dc..5e917a2b256 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -33,7 +33,7 @@ export namespace google { interface IArrowSchema { /** ArrowSchema serializedSchema */ - serializedSchema?: (Uint8Array|string|null); + serializedSchema?: (Uint8Array|Buffer|string|null); } /** Represents an ArrowSchema. */ @@ -46,7 +46,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1.IArrowSchema); /** ArrowSchema serializedSchema. */ - public serializedSchema: (Uint8Array|string); + public serializedSchema: (Uint8Array|Buffer|string); /** * Creates a new ArrowSchema instance using the specified properties. @@ -130,7 +130,7 @@ export namespace google { interface IArrowRecordBatch { /** ArrowRecordBatch serializedRecordBatch */ - serializedRecordBatch?: (Uint8Array|string|null); + serializedRecordBatch?: (Uint8Array|Buffer|string|null); /** ArrowRecordBatch rowCount */ rowCount?: (number|Long|string|null); @@ -146,7 +146,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1.IArrowRecordBatch); /** ArrowRecordBatch serializedRecordBatch. 
*/ - public serializedRecordBatch: (Uint8Array|string); + public serializedRecordBatch: (Uint8Array|Buffer|string); /** ArrowRecordBatch rowCount. */ public rowCount: (number|Long|string); @@ -437,7 +437,7 @@ export namespace google { interface IAvroRows { /** AvroRows serializedBinaryRows */ - serializedBinaryRows?: (Uint8Array|string|null); + serializedBinaryRows?: (Uint8Array|Buffer|string|null); /** AvroRows rowCount */ rowCount?: (number|Long|string|null); @@ -453,7 +453,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1.IAvroRows); /** AvroRows serializedBinaryRows. */ - public serializedBinaryRows: (Uint8Array|string); + public serializedBinaryRows: (Uint8Array|Buffer|string); /** AvroRows rowCount. */ public rowCount: (number|Long|string); @@ -2048,6 +2048,9 @@ export namespace google { /** AppendRowsRequest protoRows */ protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + /** AppendRowsRequest arrowRows */ + arrowRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData|null); + /** AppendRowsRequest traceId */ traceId?: (string|null); @@ -2076,6 +2079,9 @@ export namespace google { /** AppendRowsRequest protoRows. */ public protoRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null); + /** AppendRowsRequest arrowRows. */ + public arrowRows?: (google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData|null); + /** AppendRowsRequest traceId. */ public traceId: string; @@ -2086,7 +2092,7 @@ export namespace google { public defaultMissingValueInterpretation: (google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|keyof typeof google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation); /** AppendRowsRequest rows. */ - public rows?: "protoRows"; + public rows?: ("protoRows"|"arrowRows"); /** * Creates a new AppendRowsRequest instance using the specified properties. 
@@ -2168,6 +2174,109 @@ export namespace google { namespace AppendRowsRequest { + /** Properties of an ArrowData. */ + interface IArrowData { + + /** ArrowData writerSchema */ + writerSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ArrowData rows */ + rows?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + } + + /** Represents an ArrowData. */ + class ArrowData implements IArrowData { + + /** + * Constructs a new ArrowData. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData); + + /** ArrowData writerSchema. */ + public writerSchema?: (google.cloud.bigquery.storage.v1.IArrowSchema|null); + + /** ArrowData rows. */ + public rows?: (google.cloud.bigquery.storage.v1.IArrowRecordBatch|null); + + /** + * Creates a new ArrowData instance using the specified properties. + * @param [properties] Properties to set + * @returns ArrowData instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData): google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData; + + /** + * Encodes the specified ArrowData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.verify|verify} messages. + * @param message ArrowData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ArrowData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.verify|verify} messages. 
+ * @param message ArrowData message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ArrowData message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ArrowData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData; + + /** + * Decodes an ArrowData message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ArrowData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData; + + /** + * Verifies an ArrowData message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ArrowData message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ArrowData + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData; + + /** + * Creates a plain object from an ArrowData message. Also converts values to other types if specified. 
+ * @param message ArrowData + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ArrowData to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ArrowData + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** Properties of a ProtoData. */ interface IProtoData { @@ -6720,7 +6829,7 @@ export namespace google { interface IArrowSchema { /** ArrowSchema serializedSchema */ - serializedSchema?: (Uint8Array|string|null); + serializedSchema?: (Uint8Array|Buffer|string|null); } /** Represents an ArrowSchema. */ @@ -6733,7 +6842,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowSchema); /** ArrowSchema serializedSchema. */ - public serializedSchema: (Uint8Array|string); + public serializedSchema: (Uint8Array|Buffer|string); /** * Creates a new ArrowSchema instance using the specified properties. @@ -6817,7 +6926,7 @@ export namespace google { interface IArrowRecordBatch { /** ArrowRecordBatch serializedRecordBatch */ - serializedRecordBatch?: (Uint8Array|string|null); + serializedRecordBatch?: (Uint8Array|Buffer|string|null); /** ArrowRecordBatch rowCount */ rowCount?: (number|Long|string|null); @@ -6833,7 +6942,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1beta1.IArrowRecordBatch); /** ArrowRecordBatch serializedRecordBatch. */ - public serializedRecordBatch: (Uint8Array|string); + public serializedRecordBatch: (Uint8Array|Buffer|string); /** ArrowRecordBatch rowCount. 
*/ public rowCount: (number|Long|string); @@ -7017,7 +7126,7 @@ export namespace google { interface IAvroRows { /** AvroRows serializedBinaryRows */ - serializedBinaryRows?: (Uint8Array|string|null); + serializedBinaryRows?: (Uint8Array|Buffer|string|null); /** AvroRows rowCount */ rowCount?: (number|Long|string|null); @@ -7033,7 +7142,7 @@ export namespace google { constructor(properties?: google.cloud.bigquery.storage.v1beta1.IAvroRows); /** AvroRows serializedBinaryRows. */ - public serializedBinaryRows: (Uint8Array|string); + public serializedBinaryRows: (Uint8Array|Buffer|string); /** AvroRows rowCount. */ public rowCount: (number|Long|string); @@ -12152,7 +12261,7 @@ export namespace google { doubleValue?: (number|null); /** UninterpretedOption stringValue */ - stringValue?: (Uint8Array|string|null); + stringValue?: (Uint8Array|Buffer|string|null); /** UninterpretedOption aggregateValue */ aggregateValue?: (string|null); @@ -12183,7 +12292,7 @@ export namespace google { public doubleValue: number; /** UninterpretedOption stringValue. */ - public stringValue: (Uint8Array|string); + public stringValue: (Uint8Array|Buffer|string); /** UninterpretedOption aggregateValue. */ public aggregateValue: string; @@ -14198,7 +14307,7 @@ export namespace google { interface IBytesValue { /** BytesValue value */ - value?: (Uint8Array|string|null); + value?: (Uint8Array|Buffer|string|null); } /** Represents a BytesValue. */ @@ -14211,7 +14320,7 @@ export namespace google { constructor(properties?: google.protobuf.IBytesValue); /** BytesValue value. */ - public value: (Uint8Array|string); + public value: (Uint8Array|Buffer|string); /** * Creates a new BytesValue instance using the specified properties. @@ -14298,7 +14407,7 @@ export namespace google { type_url?: (string|null); /** Any value */ - value?: (Uint8Array|string|null); + value?: (Uint8Array|Buffer|string|null); } /** Represents an Any. 
*/ @@ -14314,7 +14423,7 @@ export namespace google { public type_url: string; /** Any value. */ - public value: (Uint8Array|string); + public value: (Uint8Array|Buffer|string); /** * Creates a new Any instance using the specified properties. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index babf418c832..a511324a5da 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -4574,6 +4574,7 @@ * @property {string|null} [writeStream] AppendRowsRequest writeStream * @property {google.protobuf.IInt64Value|null} [offset] AppendRowsRequest offset * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IProtoData|null} [protoRows] AppendRowsRequest protoRows + * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData|null} [arrowRows] AppendRowsRequest arrowRows * @property {string|null} [traceId] AppendRowsRequest traceId * @property {Object.|null} [missingValueInterpretations] AppendRowsRequest missingValueInterpretations * @property {google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation|null} [defaultMissingValueInterpretation] AppendRowsRequest defaultMissingValueInterpretation @@ -4619,6 +4620,14 @@ */ AppendRowsRequest.prototype.protoRows = null; + /** + * AppendRowsRequest arrowRows. + * @member {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData|null|undefined} arrowRows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @instance + */ + AppendRowsRequest.prototype.arrowRows = null; + /** * AppendRowsRequest traceId. * @member {string} traceId @@ -4648,12 +4657,12 @@ /** * AppendRowsRequest rows. 
- * @member {"protoRows"|undefined} rows + * @member {"protoRows"|"arrowRows"|undefined} rows * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest * @instance */ Object.defineProperty(AppendRowsRequest.prototype, "rows", { - get: $util.oneOfGetter($oneOfFields = ["protoRows"]), + get: $util.oneOfGetter($oneOfFields = ["protoRows", "arrowRows"]), set: $util.oneOfSetter($oneOfFields) }); @@ -4687,6 +4696,8 @@ $root.google.protobuf.Int64Value.encode(message.offset, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.protoRows != null && Object.hasOwnProperty.call(message, "protoRows")) $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.encode(message.protoRows, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.arrowRows != null && Object.hasOwnProperty.call(message, "arrowRows")) + $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.encode(message.arrowRows, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.traceId); if (message.missingValueInterpretations != null && Object.hasOwnProperty.call(message, "missingValueInterpretations")) @@ -4740,6 +4751,10 @@ message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.decode(reader, reader.uint32()); break; } + case 5: { + message.arrowRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.decode(reader, reader.uint32()); + break; + } case 6: { message.traceId = reader.string(); break; @@ -4823,6 +4838,16 @@ return "protoRows." + error; } } + if (message.arrowRows != null && message.hasOwnProperty("arrowRows")) { + if (properties.rows === 1) + return "rows: multiple values"; + properties.rows = 1; + { + var error = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.verify(message.arrowRows); + if (error) + return "arrowRows." 
+ error; + } + } if (message.traceId != null && message.hasOwnProperty("traceId")) if (!$util.isString(message.traceId)) return "traceId: string expected"; @@ -4876,6 +4901,11 @@ throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.protoRows: object expected"); message.protoRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData.fromObject(object.protoRows); } + if (object.arrowRows != null) { + if (typeof object.arrowRows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.arrowRows: object expected"); + message.arrowRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.fromObject(object.arrowRows); + } if (object.traceId != null) message.traceId = String(object.traceId); if (object.missingValueInterpretations) { @@ -4957,6 +4987,11 @@ if (options.oneofs) object.rows = "protoRows"; } + if (message.arrowRows != null && message.hasOwnProperty("arrowRows")) { + object.arrowRows = $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.toObject(message.arrowRows, options); + if (options.oneofs) + object.rows = "arrowRows"; + } if (message.traceId != null && message.hasOwnProperty("traceId")) object.traceId = message.traceId; var keys2; @@ -4996,6 +5031,243 @@ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest"; }; + AppendRowsRequest.ArrowData = (function() { + + /** + * Properties of an ArrowData. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @interface IArrowData + * @property {google.cloud.bigquery.storage.v1.IArrowSchema|null} [writerSchema] ArrowData writerSchema + * @property {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null} [rows] ArrowData rows + */ + + /** + * Constructs a new ArrowData. + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest + * @classdesc Represents an ArrowData. 
+ * @implements IArrowData + * @constructor + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData=} [properties] Properties to set + */ + function ArrowData(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ArrowData writerSchema. + * @member {google.cloud.bigquery.storage.v1.IArrowSchema|null|undefined} writerSchema + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @instance + */ + ArrowData.prototype.writerSchema = null; + + /** + * ArrowData rows. + * @member {google.cloud.bigquery.storage.v1.IArrowRecordBatch|null|undefined} rows + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @instance + */ + ArrowData.prototype.rows = null; + + /** + * Creates a new ArrowData instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData} ArrowData instance + */ + ArrowData.create = function create(properties) { + return new ArrowData(properties); + }; + + /** + * Encodes the specified ArrowData message. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData} message ArrowData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowData.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.writerSchema != null && Object.hasOwnProperty.call(message, "writerSchema")) + $root.google.cloud.bigquery.storage.v1.ArrowSchema.encode(message.writerSchema, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.rows != null && Object.hasOwnProperty.call(message, "rows")) + $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.encode(message.rows, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ArrowData message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.IArrowData} message ArrowData message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ArrowData.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ArrowData message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData} ArrowData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowData.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); + break; + } + case 2: { + message.rows = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ArrowData message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData} ArrowData + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ArrowData.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ArrowData message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ArrowData.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) { + var error = $root.google.cloud.bigquery.storage.v1.ArrowSchema.verify(message.writerSchema); + if (error) + return "writerSchema." + error; + } + if (message.rows != null && message.hasOwnProperty("rows")) { + var error = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.verify(message.rows); + if (error) + return "rows." + error; + } + return null; + }; + + /** + * Creates an ArrowData message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData} ArrowData + */ + ArrowData.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData) + return object; + var message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData(); + if (object.writerSchema != null) { + if (typeof object.writerSchema !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.writerSchema: object expected"); + message.writerSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.fromObject(object.writerSchema); + } + if (object.rows != null) { + if (typeof object.rows !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData.rows: object expected"); + message.rows = 
$root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.fromObject(object.rows); + } + return message; + }; + + /** + * Creates a plain object from an ArrowData message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData} message ArrowData + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ArrowData.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.writerSchema = null; + object.rows = null; + } + if (message.writerSchema != null && message.hasOwnProperty("writerSchema")) + object.writerSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.toObject(message.writerSchema, options); + if (message.rows != null && message.hasOwnProperty("rows")) + object.rows = $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch.toObject(message.rows, options); + return object; + }; + + /** + * Converts this ArrowData to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @instance + * @returns {Object.} JSON object + */ + ArrowData.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ArrowData + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ArrowData.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData"; + }; + + return ArrowData; + })(); + AppendRowsRequest.ProtoData = (function() { /** diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index eaafe219649..10d106ce114 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -502,7 +502,8 @@ "oneofs": { "rows": { "oneof": [ - "protoRows" + "protoRows", + "arrowRows" ] } }, @@ -523,6 +524,10 @@ "type": "ProtoData", "id": 4 }, + "arrowRows": { + "type": "ArrowData", + "id": 5 + }, "traceId": { "type": "string", "id": 6 @@ -541,6 +546,18 @@ } }, "nested": { + "ArrowData": { + "fields": { + "writerSchema": { + "type": "ArrowSchema", + "id": 1 + }, + "rows": { + "type": "ArrowRecordBatch", + "id": 2 + } + } + }, "ProtoData": { "fields": { "writerSchema": { diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js index cf2b960fb0c..d6f53319cfd 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js +++ 
b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js index 6489af416f6..90f0b63cda7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js index a5679f5f9ca..2ec67f39f57 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js index 1f1cb4d9476..240d6e2a628 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -61,6 +61,11 @@ function main(writeStream) { * Rows in proto format. */ // const protoRows = {} + /** + * Rows in arrow format. This is an experimental feature only selected for + * allowlisted customers. + */ + // const arrowRows = {} /** * Id set by client to annotate its identity. Only initial request setting is * respected. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js index da96822c57c..72d3877721e 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js index d40cd34d21d..022ca1217a4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js index c112283cd7c..f20f6f0a731 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js index 4eb328d0608..8434830cd8c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js index 23813d55847..94785274b88 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 57591f2185a..4aefa926648 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -206,7 +206,7 @@ "segments": [ { "start": 25, - "end": 120, + "end": 125, "type": "FULL" } ], @@ -227,6 +227,10 @@ "name": "proto_rows", "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" }, + { + "name": "arrow_rows", + "type": ".google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData" + }, { "name": "trace_id", "type": "TYPE_STRING" diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js index 05500209f3c..51e8727307d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js +++ 
b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js index d991e64e34c..1de77bffda5 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js index 51c097328e2..ebe7964749f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js index fd515972b8c..6ec4c6effe7 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js index 995cfb76097..233f9675c13 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js index 511f3008909..ab34427123c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js index 03c79337d1b..7b75eb954cd 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js index c5d36550b95..9f23c6a54db 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js index fadb7b1ea49..8dc6da158b4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js index 2e8ccd036c4..e3fad01c93d 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index 69ac09791dd..d4c2cfbcb0b 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -78,7 +78,7 @@ export function convertStorageSchemaToProto2Descriptor( schema, scope, false, - ...opts + ...opts, ); return normalizeDescriptorSet(fds); } @@ -108,7 +108,7 @@ export function convertStorageSchemaToProto3Descriptor( schema, scope, true, - ...opts + ...opts, ); return normalizeDescriptorSet(fds); } @@ -165,7 +165,7 @@ function convertStorageSchemaToFileDescriptorInternal( const fd = convertStorageSchemaToFileDescriptorInternal( subSchema, currentScope, - useProto3 + useProto3, ); for (const f of fd.file) { if (f.name) { @@ -176,7 +176,7 @@ function convertStorageSchemaToFileDescriptorInternal( field, fNumber, currentScope, - useProto3 + useProto3, ); fields.push(fdp); } else { @@ -184,7 +184,7 @@ function convertStorageSchemaToFileDescriptorInternal( field, fNumber, currentScope, - useProto3 + useProto3, ); fields.push(fdp); } @@ -200,7 +200,7 @@ function convertStorageSchemaToFileDescriptorInternal( }, 991, scope, - useProto3 + useProto3, ); fields.push(fdp); } @@ -213,7 +213,7 @@ function convertStorageSchemaToFileDescriptorInternal( }, 992, scope, - useProto3 + useProto3, ); fields.push(fdp); } @@ -298,7 +298,7 @@ export function normalizeDescriptor(dp: DescriptorProto): DescriptorProto { const normalizedNestedTypes = []; for (const nestedDP of dp.nestedType) { normalizedNestedTypes.push( - normalizeDescriptor(new DescriptorProto(nestedDP)) + normalizeDescriptor(new DescriptorProto(nestedDP)), ); } dp.nestedType = normalizedNestedTypes; @@ -313,7 +313,7 @@ function convertTableFieldSchemaToFieldDescriptorProto( field: TableFieldSchema, fNumber: number, scope: string, - useProto3: boolean + useProto3: boolean, ): FieldDescriptorProto { let name = field.name; if (!name) { @@ -330,7 +330,7 @@ 
function convertTableFieldSchemaToFieldDescriptorProto( const label = convertModeToLabel( field.mode, field.defaultValueExpression, - useProto3 + useProto3, ); let fdp: FieldDescriptorProto; if ( @@ -423,7 +423,7 @@ export function generatePlaceholderFieldName(fieldName: string): string { function shouldPackType( t: FieldDescriptorProtoType, label: FieldDescriptorProtoLabel | null, - useProto3: boolean + useProto3: boolean, ): boolean | undefined { if (useProto3) { return false; @@ -436,7 +436,7 @@ function shouldPackType( function isProto3Optional( label: FieldDescriptorProtoLabel | null, - useProto3: boolean + useProto3: boolean, ): boolean | undefined { if (!useProto3) { return undefined; diff --git a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts index 6d1094c55f8..d6e7e103790 100644 --- a/handwritten/bigquery-storage/src/adapt/proto_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/proto_mappings.ts @@ -105,7 +105,7 @@ export const bqModeToFieldLabelMapProto3: Record< export function convertModeToLabel( mode: TableFieldSchema['mode'], defaultValueExpression: TableFieldSchema['defaultValueExpression'], - useProto3: Boolean + useProto3: Boolean, ): FieldDescriptorProtoLabel | null { if (!mode) { return null; diff --git a/handwritten/bigquery-storage/src/adapt/schema.ts b/handwritten/bigquery-storage/src/adapt/schema.ts index 3a4c565a5e2..c0c0f3de914 100644 --- a/handwritten/bigquery-storage/src/adapt/schema.ts +++ b/handwritten/bigquery-storage/src/adapt/schema.ts @@ -72,7 +72,7 @@ const StorageTableField = * @return StorageTableSchema */ export function convertBigQuerySchemaToStorageTableSchema( - schema: ITableSchema + schema: ITableSchema, ): StorageTableSchema { const out: StorageTableSchema = {}; for (const field of schema.fields ?? 
[]) { @@ -103,14 +103,14 @@ function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { if (!field.type) { throw Error( - `could not convert field (${field.name}) due to unknown type value: ${field.type}` + `could not convert field (${field.name}) due to unknown type value: ${field.type}`, ); } const ftype = fieldTypeMap[field.type]; if (!ftype) { throw Error( - `could not convert field (${field.name}) due to unknown type value: ${field.type}` + `could not convert field (${field.name}) due to unknown type value: ${field.type}`, ); } out.type = ftype; @@ -132,7 +132,7 @@ function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { const rtype = fieldTypeMap[field.rangeElementType.type]; if (!rtype) { throw Error( - `could not convert range field (${field.name}) due to unknown range element type: ${field.rangeElementType.type}` + `could not convert range field (${field.name}) due to unknown range element type: ${field.rangeElementType.type}`, ); } out.rangeElementType = { diff --git a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts index 87c5e13b122..631010e1533 100644 --- a/handwritten/bigquery-storage/src/adapt/schema_mappings.ts +++ b/handwritten/bigquery-storage/src/adapt/schema_mappings.ts @@ -53,7 +53,7 @@ export const fieldTypeMap: Record = { }; export function normalizeFieldType( - field: StorageTableField + field: StorageTableField, ): StorageTableField['type'] { if (field.type) { const ftype = fieldTypeMap[field.type]; diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts index 72890f5bde1..3963e335fb8 100644 --- a/handwritten/bigquery-storage/src/managedwriter/encoder.ts +++ b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -59,7 +59,7 @@ export class JSONEncoder { */ setProtoDescriptor(protoDescriptor: IDescriptorProto): void { const normalized = 
normalizeDescriptor( - new DescriptorProto(protoDescriptor) + new DescriptorProto(protoDescriptor), ); this._type = Type.fromDescriptor(normalized); } @@ -121,7 +121,7 @@ export class JSONEncoder { private encodeRowValue( value: JSONValue, key: string, - ptype: protobuf.Type + ptype: protobuf.Type, ): JSONValue | undefined { const pfield = ptype.fields[key]; if (!pfield) { diff --git a/handwritten/bigquery-storage/src/managedwriter/error.ts b/handwritten/bigquery-storage/src/managedwriter/error.ts index bf52eeb1d20..50249d11711 100644 --- a/handwritten/bigquery-storage/src/managedwriter/error.ts +++ b/handwritten/bigquery-storage/src/managedwriter/error.ts @@ -33,7 +33,7 @@ export function parseStorageErrors(err: gax.GoogleError): StorageError[] { err.metadata.get('google.cloud.bigquery.storage.v1.storageerror-bin') ) { const serrors = err.metadata.get( - 'google.cloud.bigquery.storage.v1.storageerror-bin' + 'google.cloud.bigquery.storage.v1.storageerror-bin', ) as Buffer[]; for (const serr of serrors) { const storageError = StorageError.decode(serr); diff --git a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts index 60c9c9d9b03..27fc6070a93 100644 --- a/handwritten/bigquery-storage/src/managedwriter/json_writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/json_writer.ts @@ -70,7 +70,7 @@ export class JSONWriter { private onSchemaUpdated = (schema: TableSchema) => { const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( schema, - 'root' + 'root', ); this.setProtoDescriptor(protoDescriptor); }; @@ -93,10 +93,10 @@ export class JSONWriter { * @param {MissingValueInterpretation} defaultMissingValueInterpretation */ setDefaultMissingValueInterpretation( - defaultMissingValueInterpretation: MissingValueInterpretation + defaultMissingValueInterpretation: MissingValueInterpretation, ) { this._writer.setDefaultMissingValueInterpretation( - 
defaultMissingValueInterpretation + defaultMissingValueInterpretation, ); } @@ -106,7 +106,7 @@ export class JSONWriter { * @param {MissingValueInterpretationMap} missingValueInterpretations */ setMissingValueInterpretations( - missingValueInterpretations: MissingValueInterpretationMap + missingValueInterpretations: MissingValueInterpretationMap, ) { this._writer.setMissingValueInterpretations(missingValueInterpretations); } @@ -126,7 +126,7 @@ export class JSONWriter { { serializedRows, }, - offsetValue + offsetValue, ); return pw; } diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index 6d0e5de0c3a..e7e7dd1894d 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -62,7 +62,7 @@ export class StreamConnection extends EventEmitter { constructor( streamId: string, writeClient: WriterClient, - options?: gax.CallOptions + options?: gax.CallOptions, ) { super(); this._streamId = streamId; @@ -79,7 +79,7 @@ export class StreamConnection extends EventEmitter { this._lastConnectionError = null; const callOptions = this.resolveCallOptions( this._streamId, - this._callOptions + this._callOptions, ); const client = this._writeClient.getClient(); const connection = client.appendRows(callOptions); @@ -98,7 +98,7 @@ export class StreamConnection extends EventEmitter { this.resendAllPendingWrites(); } else { const err = new gax.GoogleError( - 'Connection failure, please retry the request' + 'Connection failure, please retry the request', ); err.code = gax.Status.UNAVAILABLE; this.ackAllPendingWrites(err); @@ -132,7 +132,7 @@ export class StreamConnection extends EventEmitter { this.trace( 'found request error with pending write', err, - nextPendingWrite + nextPendingWrite, ); this.handleRetry(err); } @@ -177,7 +177,7 @@ export class StreamConnection extends EventEmitter { 
private resolveCallOptions( streamId: string, - options?: gax.CallOptions + options?: gax.CallOptions, ): gax.CallOptions { const callOptions = options || {}; if (!callOptions.otherArgs) { @@ -232,7 +232,7 @@ export class StreamConnection extends EventEmitter { private registerListener( eventName: string, // eslint-disable-next-line @typescript-eslint/no-explicit-any - listener: (...args: any[]) => void + listener: (...args: any[]) => void, ): RemoveListener { this.addListener(eventName, listener); return { @@ -282,7 +282,7 @@ export class StreamConnection extends EventEmitter { err: Error | null, result?: | protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse - | undefined + | undefined, ) { while (this.hasPendingWrites()) { this.ackNextPendingWrite(err, result); @@ -293,7 +293,7 @@ export class StreamConnection extends EventEmitter { err: Error | null, result?: | protos.google.cloud.bigquery.storage.v1.IAppendRowsResponse - | undefined + | undefined, ) { const pw = this._pendingWrites.pop(); if (pw) { @@ -328,7 +328,7 @@ export class StreamConnection extends EventEmitter { const tries = pw._increaseAttempts(); if (tries > retrySettings.maxRetryAttempts) { pw._markDone( - new Error(`pending write max retries reached: ${tries} attempts`) + new Error(`pending write max retries reached: ${tries} attempts`), ); return; } @@ -363,7 +363,7 @@ export class StreamConnection extends EventEmitter { */ reconnect() { this.trace( - `reconnect called with ${this._pendingWrites.length} pending writes` + `reconnect called with ${this._pendingWrites.length} pending writes`, ); this.close(); this.open(); diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_types.ts b/handwritten/bigquery-storage/src/managedwriter/stream_types.ts index 45728c7df05..a47c896a191 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_types.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_types.ts @@ -57,7 +57,7 @@ export const BufferedStream = 'BUFFERED'; 
export const PendingStream = 'PENDING'; export function streamTypeToEnum( - streamType: WriteStreamType + streamType: WriteStreamType, ): WriteStream['type'] { switch (streamType) { case WriteStreamType.BUFFERED: diff --git a/handwritten/bigquery-storage/src/managedwriter/writer.ts b/handwritten/bigquery-storage/src/managedwriter/writer.ts index 05c9ce767da..187e6fd9695 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer.ts @@ -150,7 +150,7 @@ export class Writer { * @param {MissingValueInterpretation} defaultMissingValueInterpretation */ setDefaultMissingValueInterpretation( - defaultMissingValueInterpretation: MissingValueInterpretation + defaultMissingValueInterpretation: MissingValueInterpretation, ) { this._defaultMissingValueInterpretation = defaultMissingValueInterpretation; } @@ -161,7 +161,7 @@ export class Writer { * @param {MissingValueInterpretationMap} missingValueInterpretations */ setMissingValueInterpretations( - missingValueInterpretations: MissingValueInterpretationMap + missingValueInterpretations: MissingValueInterpretationMap, ) { this._missingValueInterpretations = missingValueInterpretations; } @@ -175,7 +175,7 @@ export class Writer { **/ appendRows( rows: ProtoData['rows'], - offsetValue?: IInt64Value['value'] + offsetValue?: IInt64Value['value'], ): PendingWrite { let offset: AppendRowRequest['offset']; if (offsetValue !== undefined && offsetValue !== null) { diff --git a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts index 0bbf7d090b0..ce59e55084f 100644 --- a/handwritten/bigquery-storage/src/managedwriter/writer_client.ts +++ b/handwritten/bigquery-storage/src/managedwriter/writer_client.ts @@ -171,7 +171,7 @@ export class WriterClient { streamType: WriteStreamType; destinationTable: string; }, - options?: CallOptions + options?: CallOptions, ): Promise { const stream = await 
this.createWriteStreamFullResponse(request, options); if (stream.name) { @@ -204,7 +204,7 @@ export class WriterClient { streamType: WriteStreamType; destinationTable: string; }, - options?: CallOptions + options?: CallOptions, ): Promise { await this.initialize(); const {streamType, destinationTable} = request; @@ -241,7 +241,7 @@ export class WriterClient { streamId: string; view?: WriteStreamView; }, - options?: CallOptions + options?: CallOptions, ): Promise { await this.initialize(); const {streamId, view} = request; @@ -281,7 +281,7 @@ export class WriterClient { destinationTable?: string; streamType?: WriteStreamType; }, - options?: CallOptions + options?: CallOptions, ): Promise { await this.initialize(); const {streamId, streamType, destinationTable} = request; @@ -289,12 +289,12 @@ export class WriterClient { const fullStreamId = await this.resolveStreamId( streamId, streamType, - destinationTable + destinationTable, ); const streamConnection = new StreamConnection( fullStreamId, this, - options + options, ); this._connections.connectionList.push(streamConnection); return streamConnection; @@ -306,7 +306,7 @@ export class WriterClient { private async resolveStreamId( streamId?: string, streamType?: WriteStreamType, - destinationTable?: string + destinationTable?: string, ): Promise { if (streamId && streamId !== '') { if (streamId === DefaultStream) { @@ -329,7 +329,7 @@ export class WriterClient { return `${destinationTable}/streams/_default`; } throw new Error( - 'streamId or destinationTable required to create write stream' + 'streamId or destinationTable required to create write stream', ); } @@ -361,7 +361,7 @@ export class WriterClient { * @returns {Promise} - a promise which resolves to an {@link google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse | BatchCommitWriteStreamsResponse}. 
*/ async batchCommitWriteStream( - request: BatchCommitWriteStreamsRequest + request: BatchCommitWriteStreamsRequest, ): Promise { await this.initialize(); const [res] = await this._client.batchCommitWriteStreams(request); @@ -405,7 +405,7 @@ export class WriterClient { * @returns {Promise} - A promise which resolves to a {@link google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse | FinalizeWriteStreamResponse}. */ async finalizeWriteStream( - request: FinalizeWriteStreamRequest + request: FinalizeWriteStreamRequest, ): Promise { await this.initialize(); const [res] = await this._client.finalizeWriteStream(request); diff --git a/handwritten/bigquery-storage/src/reader/arrow_reader.ts b/handwritten/bigquery-storage/src/reader/arrow_reader.ts index 01439769aa4..fb7139f7610 100644 --- a/handwritten/bigquery-storage/src/reader/arrow_reader.ts +++ b/handwritten/bigquery-storage/src/reader/arrow_reader.ts @@ -57,7 +57,7 @@ export class ArrowTableReader { 'arrow_table_reader', `[table: ${this._tableRef.tableId}]`, msg, - ...otherArgs + ...otherArgs, ); } @@ -71,7 +71,7 @@ export class ArrowTableReader { * @param {GetStreamOptions} options */ async getStream( - options?: GetStreamOptions + options?: GetStreamOptions, ): Promise> { this.trace('getStream', options); const stream = await this._session.getStream(options); @@ -84,7 +84,7 @@ export class ArrowTableReader { * @param {GetStreamOptions} options */ async getRecordBatchStream( - options?: GetStreamOptions + options?: GetStreamOptions, ): Promise> { this.trace('getRecordBatchStream', options); const stream = await this._session.getStream(options); diff --git a/handwritten/bigquery-storage/src/reader/arrow_transform.ts b/handwritten/bigquery-storage/src/reader/arrow_transform.ts index 01a3bda371b..2d871fa87d6 100644 --- a/handwritten/bigquery-storage/src/reader/arrow_transform.ts +++ b/handwritten/bigquery-storage/src/reader/arrow_transform.ts @@ -48,7 +48,7 @@ export class ArrowRawTransform extends Transform { 
_transform( response: ReadRowsResponse, _: BufferEncoding, - callback: TransformCallback + callback: TransformCallback, ): void { if ( !( @@ -81,7 +81,7 @@ export class ArrowRecordReaderTransform extends Transform { _transform( serializedRecordBatch: Uint8Array, _: BufferEncoding, - callback: TransformCallback + callback: TransformCallback, ): void { const buf = Buffer.concat([ this.session.arrowSchema?.serializedSchema as Uint8Array, @@ -106,7 +106,7 @@ export class ArrowRecordBatchTransform extends Transform { _transform( reader: RecordBatchStreamReader, _: BufferEncoding, - callback: TransformCallback + callback: TransformCallback, ): void { const batches = reader.readAll(); for (const row of batches) { @@ -130,7 +130,7 @@ export class ArrowRecordBatchTableRowTransform extends Transform { _transform( batch: RecordBatch, _: BufferEncoding, - callback: TransformCallback + callback: TransformCallback, ): void { const rows = new Array(batch.numRows); for (let i = 0; i < batch.numRows; i++) { diff --git a/handwritten/bigquery-storage/src/reader/read_client.ts b/handwritten/bigquery-storage/src/reader/read_client.ts index 5b89ae1292f..8fdcf7ddaa4 100644 --- a/handwritten/bigquery-storage/src/reader/read_client.ts +++ b/handwritten/bigquery-storage/src/reader/read_client.ts @@ -168,7 +168,7 @@ export class ReadClient { streamName: string; session: ReadSession; }, - options?: CallOptions + options?: CallOptions, ): Promise { await this.initialize(); const {streamName, session} = request; @@ -197,6 +197,8 @@ export class ReadClient { } close() { - this._client.close(); + this._client.close().catch(err => { + throw err; + }); } } diff --git a/handwritten/bigquery-storage/src/reader/read_session.ts b/handwritten/bigquery-storage/src/reader/read_session.ts index c08ce9b5fff..5403ddd63a2 100644 --- a/handwritten/bigquery-storage/src/reader/read_session.ts +++ b/handwritten/bigquery-storage/src/reader/read_session.ts @@ -56,7 +56,7 @@ export class ReadSession { constructor( 
readClient: ReadClient, tableRef: TableReference, - format: DataFormat + format: DataFormat, ) { this._info = null; this._format = format; @@ -75,7 +75,7 @@ export class ReadSession { } private async getOrCreateSession( - options?: GetStreamOptions + options?: GetStreamOptions, ): Promise { if (this._info) { return this._info; @@ -90,7 +90,7 @@ export class ReadSession { 'session created', session.name, session.streams, - session.estimatedRowCount + session.estimatedRowCount, ); this._info = session; @@ -101,7 +101,7 @@ export class ReadSession { streamName: readStream.name!, session, }, - options + options, ); this._readStreams.push(r); } @@ -115,7 +115,7 @@ export class ReadSession { * @param {GetStreamOptions} options */ async getStream( - options?: GetStreamOptions + options?: GetStreamOptions, ): Promise> { this.trace('getStream', options); @@ -125,7 +125,7 @@ export class ReadSession { this._readStreams.map(r => { const stream = r.getRowsStream(); return stream; - }) + }), ); const joined = Readable.from(mergedStream); this.trace('joined streams', joined); diff --git a/handwritten/bigquery-storage/src/reader/read_stream.ts b/handwritten/bigquery-storage/src/reader/read_stream.ts index 7e528e4e70f..ac2a5842bb8 100644 --- a/handwritten/bigquery-storage/src/reader/read_stream.ts +++ b/handwritten/bigquery-storage/src/reader/read_stream.ts @@ -48,7 +48,7 @@ export class ReadStream { streamName: string, session: ReadSession, readClient: ReadClient, - options?: gax.CallOptions + options?: gax.CallOptions, ) { this._streamName = streamName; this._session = session; @@ -70,7 +70,7 @@ export class ReadStream { readStream: this._streamName, offset: this._offset, }, - this._callOptions + this._callOptions, ); this._connection = connection; const passthrough = new Transform({ @@ -102,7 +102,7 @@ export class ReadStream { 'read_stream', `[streamName: ${this._streamName}]`, msg, - ...otherArgs + ...otherArgs, ); } diff --git 
a/handwritten/bigquery-storage/src/reader/table_reader.ts b/handwritten/bigquery-storage/src/reader/table_reader.ts index 8ccef34aac2..9edf5a095cd 100644 --- a/handwritten/bigquery-storage/src/reader/table_reader.ts +++ b/handwritten/bigquery-storage/src/reader/table_reader.ts @@ -83,7 +83,7 @@ export class TableReader { 'table_reader', `[table: ${this._tableRef.tableId}]`, msg, - ...otherArgs + ...otherArgs, ); } @@ -92,12 +92,12 @@ export class TableReader { } async getRowStream( - options?: GetRowsOptions + options?: GetRowsOptions, ): Promise> { this.trace('getRowStream', options); const stream = await this._arrowReader.getRecordBatchStream(options); return stream.pipe( - new ArrowRecordBatchTableRowTransform() + new ArrowRecordBatchTableRowTransform(), ) as ResourceStream; } diff --git a/handwritten/bigquery-storage/src/util/logger.ts b/handwritten/bigquery-storage/src/util/logger.ts index d7c02f7aeed..f0f5e912278 100644 --- a/handwritten/bigquery-storage/src/util/logger.ts +++ b/handwritten/bigquery-storage/src/util/logger.ts @@ -30,7 +30,7 @@ export function logger(source: string, msg: string, ...otherArgs: any[]) { const time = new Date().toISOString(); const formattedMsg = util.format( `D ${time} | ${source} | ${msg} |`, - ...otherArgs + ...otherArgs, ); logFunction(formattedMsg); } diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index 12618c432af..c835e27f186 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -53,6 +54,8 @@ export class BigQueryReadClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('bigquery-storage'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -87,7 +90,7 @@ export class BigQueryReadClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -106,7 +109,7 @@ export class BigQueryReadClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryReadClient; @@ -116,7 +119,7 @@ export class BigQueryReadClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -197,19 +200,19 @@ export class BigQueryReadClient { // Create useful helper objects for these. 
this.pathTemplates = { projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' + 'projects/{project}', ), readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' + 'projects/{project}/locations/{location}/sessions/{session}', ), readStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}', ), tablePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}' + 'projects/{project}/datasets/{dataset}/tables/{table}', ), writeStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}', ), }; @@ -219,7 +222,7 @@ export class BigQueryReadClient { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -228,7 +231,7 @@ export class BigQueryReadClient { 'google.cloud.bigquery.storage.v1.BigQueryRead', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -262,12 +265,12 @@ export class BigQueryReadClient { this.bigQueryReadStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1.BigQueryRead' + 'google.cloud.bigquery.storage.v1.BigQueryRead', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryRead, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -283,13 +286,13 @@ export class BigQueryReadClient { (...args: Array<{}>) => { if (this._terminated) { if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); + const stream = new PassThrough({objectMode: true}); setImmediate(() => { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) + 'The client has already been closed.', + ), ); }); return stream; @@ -301,7 +304,7 @@ export class BigQueryReadClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = this.descriptors.stream[methodName] || undefined; @@ -309,7 +312,7 @@ export class BigQueryReadClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -330,7 +333,7 @@ export class BigQueryReadClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -348,7 +351,7 @@ export class BigQueryReadClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -393,7 +396,7 @@ export class BigQueryReadClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -464,7 +467,7 @@ export class BigQueryReadClient { */ createReadSession( request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IReadSession, @@ -484,7 +487,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): void; createReadSession( request: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, @@ -494,7 +497,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): void; createReadSession( request?: protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest, @@ -513,7 +516,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IReadSession, @@ -539,8 +542,39 @@ export class BigQueryReadClient { this._gaxModule.routingHeader.fromParams({ 'read_session.table': request.readSession!.table ?? '', }); - this.initialize(); - return this.innerApiCalls.createReadSession(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('createReadSession request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IReadSession, + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('createReadSession response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .createReadSession(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateReadSessionRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('createReadSession response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Splits a given `ReadStream` into two `ReadStream` objects. These @@ -579,7 +613,7 @@ export class BigQueryReadClient { */ splitReadStream( request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, @@ -599,7 +633,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): void; splitReadStream( request: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, @@ -609,7 +643,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): void; splitReadStream( request?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest, @@ -628,7 +662,7 @@ export class BigQueryReadClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, @@ -654,8 +688,39 @@ export class BigQueryReadClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.splitReadStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('splitReadStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('splitReadStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .splitReadStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.ISplitReadStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('splitReadStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -686,7 +751,7 @@ export class BigQueryReadClient { */ readRows( request?: protos.google.cloud.bigquery.storage.v1.IReadRowsRequest, - options?: CallOptions + options?: CallOptions, ): gax.CancellableStream { request = request || {}; options = options || {}; @@ -696,7 +761,10 @@ export class BigQueryReadClient { this._gaxModule.routingHeader.fromParams({ read_stream: request.readStream ?? 
'', }); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('readRows stream %j', options); return this.innerApiCalls.readRows(request, options); } @@ -792,7 +860,7 @@ export class BigQueryReadClient { project: string, location: string, session: string, - stream: string + stream: string, ) { return this.pathTemplates.readStreamPathTemplate.render({ project: project, @@ -912,7 +980,7 @@ export class BigQueryReadClient { project: string, dataset: string, table: string, - stream: string + stream: string, ) { return this.pathTemplates.writeStreamPathTemplate.render({ project: project, @@ -979,6 +1047,7 @@ export class BigQueryReadClient { close(): Promise { if (this.bigQueryReadStub && !this._terminated) { return this.bigQueryReadStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index cd9ed9e78e2..debb8fbac27 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -56,6 +57,8 @@ export class BigQueryWriteClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('bigquery-storage'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -90,7 +93,7 @@ export class BigQueryWriteClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -109,7 +112,7 @@ export class BigQueryWriteClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryWriteClient; @@ -119,7 +122,7 @@ export class BigQueryWriteClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -200,19 +203,19 @@ export class BigQueryWriteClient { // Create useful helper objects for these. 
this.pathTemplates = { projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' + 'projects/{project}', ), readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' + 'projects/{project}/locations/{location}/sessions/{session}', ), readStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}', ), tablePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}' + 'projects/{project}/datasets/{dataset}/tables/{table}', ), writeStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}' + 'projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}', ), }; @@ -222,7 +225,7 @@ export class BigQueryWriteClient { appendRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -231,7 +234,7 @@ export class BigQueryWriteClient { 'google.cloud.bigquery.storage.v1.BigQueryWrite', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -265,12 +268,12 @@ export class BigQueryWriteClient { this.bigQueryWriteStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1.BigQueryWrite' + 'google.cloud.bigquery.storage.v1.BigQueryWrite', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1.BigQueryWrite, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -289,13 +292,13 @@ export class BigQueryWriteClient { (...args: Array<{}>) => { if (this._terminated) { if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); + const stream = new PassThrough({objectMode: true}); setImmediate(() => { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) + 'The client has already been closed.', + ), ); }); return stream; @@ -307,7 +310,7 @@ export class BigQueryWriteClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = this.descriptors.stream[methodName] || undefined; @@ -315,7 +318,7 @@ export class BigQueryWriteClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -336,7 +339,7 @@ export class BigQueryWriteClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -354,7 +357,7 @@ export class BigQueryWriteClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -400,7 +403,7 @@ export class BigQueryWriteClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -438,7 +441,7 @@ export class BigQueryWriteClient { */ createWriteStream( request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IWriteStream, @@ -458,7 +461,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; createWriteStream( request: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, @@ -468,7 +471,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; createWriteStream( request?: protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest, @@ -487,7 +490,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IWriteStream, @@ -513,8 +516,39 @@ export class BigQueryWriteClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.createWriteStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('createWriteStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('createWriteStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .createWriteStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.ICreateWriteStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('createWriteStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets information about a write stream. @@ -538,7 +572,7 @@ export class BigQueryWriteClient { */ getWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IWriteStream, @@ -558,7 +592,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; getWriteStream( request: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, @@ -568,7 +602,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; getWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest, @@ -587,7 +621,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IWriteStream, @@ -613,8 +647,39 @@ export class BigQueryWriteClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getWriteStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getWriteStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IWriteStream, + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('getWriteStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getWriteStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IWriteStream, + ( + | protos.google.cloud.bigquery.storage.v1.IGetWriteStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getWriteStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Finalize a write stream so that no new data can be appended to the @@ -636,7 +701,7 @@ export class BigQueryWriteClient { */ finalizeWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, @@ -656,7 +721,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; finalizeWriteStream( request: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, @@ -666,7 +731,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; finalizeWriteStream( request?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest, @@ -685,7 +750,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, @@ -711,8 +776,39 @@ export class BigQueryWriteClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.finalizeWriteStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('finalizeWriteStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('finalizeWriteStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .finalizeWriteStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('finalizeWriteStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Atomically commits a group of `PENDING` streams that belong to the same @@ -740,7 +836,7 @@ export class BigQueryWriteClient { */ batchCommitWriteStreams( request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, @@ -760,7 +856,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCommitWriteStreams( request: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, @@ -770,7 +866,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCommitWriteStreams( request?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest, @@ -789,7 +885,7 @@ export 
class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, @@ -815,12 +911,39 @@ export class BigQueryWriteClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.batchCommitWriteStreams( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchCommitWriteStreams request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('batchCommitWriteStreams response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchCommitWriteStreams(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('batchCommitWriteStreams response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Flushes rows to a BUFFERED stream. 
@@ -850,7 +973,7 @@ export class BigQueryWriteClient { */ flushRows( request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, @@ -867,7 +990,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; flushRows( request: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, @@ -877,7 +1000,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): void; flushRows( request?: protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest, @@ -896,7 +1019,7 @@ export class BigQueryWriteClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, @@ -919,8 +1042,36 @@ export class BigQueryWriteClient { this._gaxModule.routingHeader.fromParams({ write_stream: request.writeStream ?? '', }); - this.initialize(); - return this.innerApiCalls.flushRows(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('flushRows request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + | protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('flushRows response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .flushRows(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse, + protos.google.cloud.bigquery.storage.v1.IFlushRowsRequest | undefined, + {} | undefined, + ]) => { + this._log.info('flushRows response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -968,7 +1119,10 @@ export class BigQueryWriteClient { * region_tag:bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async */ appendRows(options?: CallOptions): gax.CancellableStream { - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('appendRows stream %j', options); return this.innerApiCalls.appendRows(null, options); } @@ -1064,7 +1218,7 @@ export class BigQueryWriteClient { project: string, location: string, session: string, - stream: string + stream: string, ) { return this.pathTemplates.readStreamPathTemplate.render({ project: project, @@ -1184,7 +1338,7 @@ export class BigQueryWriteClient { project: string, dataset: string, table: string, - stream: string + stream: string, ) { return this.pathTemplates.writeStreamPathTemplate.render({ project: project, @@ -1251,6 +1405,7 @@ export class BigQueryWriteClient { close(): Promise { if (this.bigQueryWriteStub && !this._terminated) { return this.bigQueryWriteStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1/index.ts b/handwritten/bigquery-storage/src/v1/index.ts index 3fc000e2add..4ef2dcd2641 100644 --- a/handwritten/bigquery-storage/src/v1/index.ts +++ b/handwritten/bigquery-storage/src/v1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/src/v1alpha/index.ts b/handwritten/bigquery-storage/src/v1alpha/index.ts index 1f399cceb88..c934f7b7787 100644 --- a/handwritten/bigquery-storage/src/v1alpha/index.ts +++ b/handwritten/bigquery-storage/src/v1alpha/index.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts index ac5361efb89..80a367b70cc 100644 --- a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -53,6 +54,8 @@ export class MetastorePartitionServiceClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('storage'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -87,7 +90,7 @@ export class MetastorePartitionServiceClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. 
If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -106,7 +109,7 @@ export class MetastorePartitionServiceClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this @@ -117,7 +120,7 @@ export class MetastorePartitionServiceClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -201,10 +204,10 @@ export class MetastorePartitionServiceClient { // Create useful helper objects for these. 
this.pathTemplates = { readStreamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}' + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}', ), tablePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/datasets/{dataset}/tables/{table}' + 'projects/{project}/datasets/{dataset}/tables/{table}', ), }; @@ -214,7 +217,7 @@ export class MetastorePartitionServiceClient { streamMetastorePartitions: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -223,7 +226,7 @@ export class MetastorePartitionServiceClient { 'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -257,13 +260,13 @@ export class MetastorePartitionServiceClient { this.metastorePartitionServiceStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService' + 'google.cloud.bigquery.storage.v1alpha.MetastorePartitionService', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1alpha .MetastorePartitionService, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -281,13 +284,13 @@ export class MetastorePartitionServiceClient { (...args: Array<{}>) => { if (this._terminated) { if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); + const stream = new PassThrough({objectMode: true}); setImmediate(() => { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) + 'The client has already been closed.', + ), ); }); return stream; @@ -299,7 +302,7 @@ export class MetastorePartitionServiceClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = this.descriptors.stream[methodName] || undefined; @@ -307,7 +310,7 @@ export class MetastorePartitionServiceClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -328,7 +331,7 @@ export class MetastorePartitionServiceClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -346,7 +349,7 @@ export class MetastorePartitionServiceClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -391,7 +394,7 @@ export class MetastorePartitionServiceClient { * @returns {Promise} A 
promise that resolves to string containing the project ID. */ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -431,7 +434,7 @@ export class MetastorePartitionServiceClient { */ batchCreateMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, @@ -451,7 +454,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCreateMetastorePartitions( request: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, @@ -461,7 +464,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCreateMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest, @@ -480,7 +483,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, @@ -506,12 +509,45 @@ export class MetastorePartitionServiceClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.batchCreateMetastorePartitions( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchCreateMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info( + 'batchCreateMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchCreateMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchCreateMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); } /** * Deletes metastore partitions from a table. @@ -536,7 +572,7 @@ export class MetastorePartitionServiceClient { */ batchDeleteMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -556,7 +592,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchDeleteMetastorePartitions( request: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, @@ -566,7 +602,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchDeleteMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest, @@ -585,7 +621,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -611,12 +647,45 @@ export class MetastorePartitionServiceClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.batchDeleteMetastorePartitions( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchDeleteMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info( + 'batchDeleteMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchDeleteMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchDeleteMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); } /** * Updates metastore partitions in a table. 
@@ -640,7 +709,7 @@ export class MetastorePartitionServiceClient { */ batchUpdateMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, @@ -660,7 +729,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchUpdateMetastorePartitions( request: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, @@ -670,7 +739,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; batchUpdateMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest, @@ -689,7 +758,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, @@ -715,12 +784,45 @@ export class MetastorePartitionServiceClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.batchUpdateMetastorePartitions( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchUpdateMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info( + 'batchUpdateMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .batchUpdateMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchUpdateMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); } /** * Gets metastore partitions from a table. @@ -753,7 +855,7 @@ export class MetastorePartitionServiceClient { */ listMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, @@ -773,7 +875,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; listMetastorePartitions( request: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, @@ -783,7 +885,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): void; listMetastorePartitions( request?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest, @@ -802,7 +904,7 @@ export class MetastorePartitionServiceClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, @@ -828,12 +930,39 @@ export class MetastorePartitionServiceClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listMetastorePartitions( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('listMetastorePartitions response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .listMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('listMetastorePartitions response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -856,7 +985,10 @@ export class MetastorePartitionServiceClient { * region_tag:bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async */ streamMetastorePartitions(options?: CallOptions): gax.CancellableStream { - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('streamMetastorePartitions stream %j', options); return this.innerApiCalls.streamMetastorePartitions(null, options); } @@ -877,7 +1009,7 @@ export class MetastorePartitionServiceClient { project: string, location: string, session: string, - stream: string + stream: string, ) { return this.pathTemplates.readStreamPathTemplate.render({ project: project, @@ -993,6 +1125,7 @@ export class MetastorePartitionServiceClient { close(): Promise { if 
(this.metastorePartitionServiceStub && !this._terminated) { return this.metastorePartitionServiceStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 2cef5cca838..9ac7f79367b 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -58,6 +59,8 @@ export class BigQueryStorageClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('bigquery-storage'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -92,7 +95,7 @@ export class BigQueryStorageClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. 
@@ -111,7 +114,7 @@ export class BigQueryStorageClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof BigQueryStorageClient; @@ -121,7 +124,7 @@ export class BigQueryStorageClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -202,13 +205,13 @@ export class BigQueryStorageClient { // Create useful helper objects for these. this.pathTemplates = { projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' + 'projects/{project}', ), readSessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/sessions/{session}' + 'projects/{project}/locations/{location}/sessions/{session}', ), streamPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/streams/{stream}' + 'projects/{project}/locations/{location}/streams/{stream}', ), }; @@ -218,7 +221,7 @@ export class BigQueryStorageClient { readRows: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -227,7 +230,7 @@ export class BigQueryStorageClient { 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -261,13 +264,13 @@ export class BigQueryStorageClient { this.bigQueryStorageStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage' + 'google.cloud.bigquery.storage.v1beta1.BigQueryStorage', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.cloud.bigquery.storage.v1beta1 .BigQueryStorage, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -285,13 +288,13 @@ export class BigQueryStorageClient { (...args: Array<{}>) => { if (this._terminated) { if (methodName in this.descriptors.stream) { - const stream = new PassThrough(); + const stream = new PassThrough({objectMode: true}); setImmediate(() => { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) + 'The client has already been closed.', + ), ); }); return stream; @@ -303,7 +306,7 @@ export class BigQueryStorageClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = this.descriptors.stream[methodName] || undefined; @@ -311,7 +314,7 @@ export class BigQueryStorageClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -332,7 +335,7 @@ export class BigQueryStorageClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -350,7 +353,7 @@ export class BigQueryStorageClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'bigquerystorage.googleapis.com'; @@ -395,7 +398,7 @@ export class BigQueryStorageClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -458,7 +461,7 @@ export class BigQueryStorageClient { */ createReadSession( request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IReadSession, @@ -478,7 +481,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; createReadSession( request: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, @@ -488,7 +491,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; createReadSession( request?: protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest, @@ -507,7 +510,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IReadSession, @@ -534,8 +537,39 @@ export class BigQueryStorageClient { 'table_reference.project_id': request.tableReference!.projectId ?? '', 'table_reference.dataset_id': request.tableReference!.datasetId ?? '', }); - this.initialize(); - return this.innerApiCalls.createReadSession(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('createReadSession request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('createReadSession response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .createReadSession(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta1.IReadSession, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ICreateReadSessionRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('createReadSession response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Creates additional streams for a ReadSession. This API can be used to @@ -562,7 +596,7 @@ export class BigQueryStorageClient { */ batchCreateReadSessionStreams( request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, @@ -582,7 +616,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCreateReadSessionStreams( request: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, @@ -592,7 +626,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; batchCreateReadSessionStreams( request?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest, @@ -611,7 +645,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, @@ -637,12 +671,39 @@ export class BigQueryStorageClient { this._gaxModule.routingHeader.fromParams({ 'session.name': request.session!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.batchCreateReadSessionStreams( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchCreateReadSessionStreams request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('batchCreateReadSessionStreams response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchCreateReadSessionStreams(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('batchCreateReadSessionStreams response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Causes a single stream in a ReadSession to gracefully stop. 
This @@ -675,7 +736,7 @@ export class BigQueryStorageClient { */ finalizeStream( request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -695,7 +756,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; finalizeStream( request: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, @@ -705,7 +766,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; finalizeStream( request?: protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest, @@ -724,7 +785,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -750,8 +811,39 @@ export class BigQueryStorageClient { this._gaxModule.routingHeader.fromParams({ 'stream.name': request.stream!.name ?? '', }); - this.initialize(); - return this.innerApiCalls.finalizeStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('finalizeStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('finalizeStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .finalizeStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta1.IFinalizeStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('finalizeStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Splits a given read stream into two Streams. These streams are referred to @@ -791,7 +883,7 @@ export class BigQueryStorageClient { */ splitReadStream( request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, @@ -811,7 +903,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; splitReadStream( request: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, @@ -821,7 +913,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): void; splitReadStream( request?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest, @@ -840,7 +932,7 @@ export class BigQueryStorageClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, @@ -866,8 +958,39 @@ export class BigQueryStorageClient { this._gaxModule.routingHeader.fromParams({ 'original_stream.name': request.originalStream!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.splitReadStream(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('splitReadStream request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('splitReadStream response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .splitReadStream(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('splitReadStream response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -898,7 +1021,7 @@ export class BigQueryStorageClient { */ readRows( request?: protos.google.cloud.bigquery.storage.v1beta1.IReadRowsRequest, - options?: CallOptions + options?: CallOptions, ): gax.CancellableStream { request = request || {}; options = options || {}; @@ -908,7 +1031,10 @@ export class BigQueryStorageClient { this._gaxModule.routingHeader.fromParams({ 'read_position.stream.name': request.readPosition!.stream!.name ?? 
'', }); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('readRows stream %j', options); return this.innerApiCalls.readRows(request, options); } @@ -1049,6 +1175,7 @@ export class BigQueryStorageClient { close(): Promise { if (this.bigQueryStorageStub && !this._terminated) { return this.bigQueryStorageStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/handwritten/bigquery-storage/src/v1beta1/index.ts b/handwritten/bigquery-storage/src/v1beta1/index.ts index 709e4bb9416..157199d4f44 100644 --- a/handwritten/bigquery-storage/src/v1beta1/index.ts +++ b/handwritten/bigquery-storage/src/v1beta1/index.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js index 6cfddb44e1d..15ec99a7e61 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.js @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts index 60e17b1986f..3c3da47e695 100644 --- a/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts +++ b/handwritten/bigquery-storage/system-test/fixtures/sample/src/index.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/system-test/install.ts b/handwritten/bigquery-storage/system-test/install.ts index 3785fd2f6f5..5257a7ba101 100644 --- a/handwritten/bigquery-storage/system-test/install.ts +++ b/handwritten/bigquery-storage/system-test/install.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -27,9 +27,8 @@ describe('📦 pack-n-play test', () => { packageDir: process.cwd(), sample: { description: 'TypeScript user can use the type definitions', - devDependencies: ['@types/web'], ts: readFileSync( - './system-test/fixtures/sample/src/index.ts' + './system-test/fixtures/sample/src/index.ts', ).toString(), }, }; @@ -43,7 +42,7 @@ describe('📦 pack-n-play test', () => { sample: { description: 'JavaScript user can use the library', ts: readFileSync( - './system-test/fixtures/sample/src/index.js' + './system-test/fixtures/sample/src/index.js', ).toString(), }, }; diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index ff3415a1566..aa3c8b07a0a 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -29,7 +29,7 @@ import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; const pkg = JSON.parse( - readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8') + readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8'), ); const sandbox = sinon.createSandbox(); @@ -158,7 +158,9 @@ describe('managedwriter.WriterClient', () => { describe('Writer', () => { it('should invoke appendRows without errors', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const streamType: WriteStream['type'] = managedwriter.PendingStream; const client = new WriterClient(); client.setClient(bqWriteClient); @@ -209,7 +211,7 @@ describe('managedwriter.WriterClient', () => { { serializedRows: [serializedRow1Message, serializedRow2Message], }, - offset + offset, ); const result = await pw.getResult(); const responses: AppendRowsResponse[] = [ @@ -236,7 +238,9 @@ describe('managedwriter.WriterClient', () => { }); it('should invoke appendRows to 
default stream without errors', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -303,7 +307,9 @@ describe('managedwriter.WriterClient', () => { }); it('should invoke createWriteStream when streamType and destination table informed to createStreamConnection', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const streamType: WriteStream['type'] = managedwriter.PendingStream; const client = new WriterClient(); client.setClient(bqWriteClient); @@ -385,7 +391,9 @@ describe('managedwriter.WriterClient', () => { describe('StreamConnection', () => { it('should pass traceId on AppendRequests', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -609,7 +617,9 @@ describe('managedwriter.WriterClient', () => { describe('JSONWriter', () => { it('should invoke appendRows without errors', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const streamType: WriteStream['type'] = managedwriter.PendingStream; const client = new WriterClient(); client.setClient(bqWriteClient); @@ -683,7 +693,9 @@ describe('managedwriter.WriterClient', () => { }); it('should update proto descriptor automatically with appendRows without errors', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -765,7 +777,7 @@ describe('managedwriter.WriterClient', () => { adapt.convertBigQuerySchemaToStorageTableSchema(updatedSchema); assert.equal( result.updatedSchema.fields?.length, - updatedStorageSchema.fields?.length + updatedStorageSchema.fields?.length, ); assert.equal(receivedSchemaNotification, true); @@ -784,7 +796,7 
@@ describe('managedwriter.WriterClient', () => { assert.equal(commitResponse.streamErrors?.length, 0); const [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${tableId}\` order by row_num` + `SELECT * FROM \`${projectId}.${datasetId}.${tableId}\` order by row_num`, ); assert.strictEqual(rows.length, offset); @@ -797,7 +809,9 @@ describe('managedwriter.WriterClient', () => { }).timeout(30 * 1000); it('Change data capture (CDC)', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -836,7 +850,7 @@ describe('managedwriter.WriterClient', () => { adapt.convertStorageSchemaToProto2Descriptor( storageSchema, 'root', - adapt.withChangeType() + adapt.withChangeType(), ); const row1 = { @@ -866,7 +880,7 @@ describe('managedwriter.WriterClient', () => { let result = await pw.getResult(); let [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id`, ); assert.strictEqual(rows.length, 2); @@ -894,7 +908,7 @@ describe('managedwriter.WriterClient', () => { result = await pw.getResult(); [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id`, ); assert.strictEqual(rows.length, 3); @@ -905,7 +919,7 @@ describe('managedwriter.WriterClient', () => { result = await pw.getResult(); [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id` + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by id`, ); assert.strictEqual(rows.length, 2); @@ -922,7 +936,9 @@ describe('managedwriter.WriterClient', () => { }); it('Flexible Columns and annotations', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + 
}); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -982,7 +998,7 @@ describe('managedwriter.WriterClient', () => { await pw.getResult(); const [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\`` + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\``, ); assert.strictEqual(rows.length, 2); assert.deepStrictEqual(rows, [ @@ -1006,7 +1022,9 @@ describe('managedwriter.WriterClient', () => { }); it('should fill default values when MissingValuesInterpretation is set', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1112,7 +1130,7 @@ describe('managedwriter.WriterClient', () => { assert.equal(commitResponse.streamErrors?.length, 0); const [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by row_num` + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by row_num`, ); assert.strictEqual(rows.length, 4); @@ -1179,7 +1197,9 @@ describe('managedwriter.WriterClient', () => { describe('should manage to send data in sequence scenario', () => { it('every 10 request drops the connection', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.enableWriteRetries(true); client.setClient(bqWriteClient); @@ -1230,7 +1250,9 @@ describe('managedwriter.WriterClient', () => { }).timeout(2 * 60 * 1000); it('opening the connection can fail more frequently', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.enableWriteRetries(true); client.setMaxRetryAttempts(100); // aggresive retries @@ -1287,7 +1309,9 @@ describe('managedwriter.WriterClient', () => { describe('should manage to send data in parallel', () => { it('every 10 request drops the 
connection', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.enableWriteRetries(true); client.setMaxRetryAttempts(10); @@ -1338,7 +1362,9 @@ describe('managedwriter.WriterClient', () => { }).timeout(2 * 60 * 1000); it('every 10 request there is a in stream INTERNAL error', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.enableWriteRetries(true); client.setClient(bqWriteClient); @@ -1357,7 +1383,8 @@ describe('managedwriter.WriterClient', () => { .callsFake( ( chunk: unknown, - cb?: ((error: Error | null | undefined) => void) | undefined + _, + cb?: ((error: Error | null | undefined) => void) | undefined, ): boolean => { const req = chunk as AppendRowsRequest; cb && cb(null); @@ -1385,7 +1412,7 @@ describe('managedwriter.WriterClient', () => { numSucess++; } return false; - } + }, ); const writer = new JSONWriter({ @@ -1419,7 +1446,9 @@ describe('managedwriter.WriterClient', () => { describe('Error Scenarios', () => { it('send request with mismatched proto descriptor', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1488,7 +1517,7 @@ describe('managedwriter.WriterClient', () => { assert.equal(storageErrors.length, 1); assert.equal( storageErrors[0].errorMessage, - 'Schema mismatch due to extra fields in user schema' + 'Schema mismatch due to extra fields in user schema', ); writer.close(); @@ -1498,7 +1527,9 @@ describe('managedwriter.WriterClient', () => { }); it('send request with invalid protobuf row', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1521,7 +1552,7 @@ describe('managedwriter.WriterClient', () 
=> { protoDescriptor.field = protoDescriptor.field?.slice(0, 1); // leave just first field const invalidProto = Type.fromDescriptor( - protoDescriptor + protoDescriptor, ) as protobuf.Type; const row = { customer_name: 'Test', @@ -1532,13 +1563,13 @@ describe('managedwriter.WriterClient', () => { { serializedRows: [serialized], }, - 0 + 0, ); const res = await pw.getResult(); assert.notEqual(res.error, null); assert.equal( res.error?.message?.split('.')[0], - 'Errors found while processing rows' + 'Errors found while processing rows', ); writer.close(); @@ -1548,7 +1579,9 @@ describe('managedwriter.WriterClient', () => { }); it('send empty rows request should return an error', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1573,7 +1606,7 @@ describe('managedwriter.WriterClient', () => { assert.notEqual(res.error, null); assert.equal( res.error?.message?.split('.')[0], - 'Rows must be specified' + 'Rows must be specified', ); writer.close(); @@ -1583,7 +1616,9 @@ describe('managedwriter.WriterClient', () => { }); it('send large request should return an error', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1621,7 +1656,7 @@ describe('managedwriter.WriterClient', () => { { serializedRows: rows, }, - 0 + 0, ); let res = await badPw.getResult(); @@ -1632,7 +1667,7 @@ describe('managedwriter.WriterClient', () => { { serializedRows: [serializedRowMessage], }, - 0 + 0, ); res = await goodPw.getResult(); assert.equal(res.appendResult?.offset?.value, '0'); @@ -1644,7 +1679,9 @@ describe('managedwriter.WriterClient', () => { }); it('should trigger reconnection when connection closes and there are pending writes', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + 
}); const client = new WriterClient(); client.enableWriteRetries(true); client.setClient(bqWriteClient); @@ -1677,7 +1714,7 @@ describe('managedwriter.WriterClient', () => { row_num: 2, }, ], - 0 + 0, ); await pw.getResult(); @@ -1704,7 +1741,9 @@ describe('managedwriter.WriterClient', () => { }); it('reconnect on idle connection', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1737,7 +1776,7 @@ describe('managedwriter.WriterClient', () => { // Simulate server sending ABORT error as the conn was idle const conn = connection['_connection'] as gax.CancellableStream; // private method const gerr = new gax.GoogleError( - 'Closing the stream because it has been inactive for 600 seconds' + 'Closing the stream because it has been inactive for 600 seconds', ); gerr.code = gax.Status.ABORTED; conn.emit('error', gerr); @@ -1766,7 +1805,9 @@ describe('managedwriter.WriterClient', () => { }).timeout(20 * 1000); it('should mark any pending writes with error if connection was closed', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1829,7 +1870,9 @@ describe('managedwriter.WriterClient', () => { describe('close', () => { it('should invoke close without errors', async () => { - bqWriteClient.initialize(); + bqWriteClient.initialize().catch(err => { + throw err; + }); const streamType: WriteStream['type'] = managedwriter.PendingStream; const client = new WriterClient(); client.setClient(bqWriteClient); @@ -1868,7 +1911,7 @@ describe('managedwriter.WriterClient', () => { { serializedRows: [serializedRow1Message, serializedRow2Message], }, - offset + offset, ); await pw.getResult(); @@ -1894,7 +1937,7 @@ describe('managedwriter.WriterClient', () => { async function deleteDatasets() { let [datasets] = await 
bigquery.getDatasets(); datasets = datasets.filter(dataset => - dataset.id?.includes(GCLOUD_TESTS_PREFIX) + dataset.id?.includes(GCLOUD_TESTS_PREFIX), ); for (const dataset of datasets) { diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts index 36effd487a5..063930bae13 100644 --- a/handwritten/bigquery-storage/system-test/reader_client_test.ts +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -180,7 +180,9 @@ describe('reader.ReaderClient', () => { describe('Read', () => { it('should invoke createReadSession and createReadStream without errors', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -226,7 +228,9 @@ describe('reader.ReaderClient', () => { describe('ArrowTableReader', () => { it('should allow to read a table as an Arrow byte stream', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -250,7 +254,15 @@ describe('reader.ReaderClient', () => { if (session?.arrowSchema?.serializedSchema) { serializedSchema = session?.arrowSchema?.serializedSchema; } - let buf = Buffer.from(serializedSchema); + // type checking needs to occur before calling Buffer.from + // has to do with overload resolution + // related to https://github.com/microsoft/TypeScript/issues/14107 + let buf: Buffer; + if (typeof serializedSchema === 'string') { + buf = Buffer.from(serializedSchema); + } else if (serializedSchema instanceof Uint8Array) { + buf = Buffer.from(serializedSchema); + } rawStream.on('data', (data: Uint8Array) => { buf = Buffer.concat([buf, data]); }); @@ -271,7 +283,9 @@ describe('reader.ReaderClient', () => { }); it('should allow to read a table as a stream of Arrow Record Batches', async () => { - 
bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -308,7 +322,9 @@ describe('reader.ReaderClient', () => { describe('TableReader', () => { it('should allow to read a table as a stream', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -483,7 +499,9 @@ describe('reader.ReaderClient', () => { }); it('should allow to read a table as tabledata.list RowsResponse', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -513,14 +531,16 @@ describe('reader.ReaderClient', () => { }); it('should allow to read a table with long running query', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); try { const genTableId = generateUuid(); await bigquery.query( - `CREATE TABLE ${projectId}.${datasetId}.${genTableId} AS SELECT num FROM UNNEST(GENERATE_ARRAY(1,1000000)) as num` + `CREATE TABLE ${projectId}.${datasetId}.${genTableId} AS SELECT num FROM UNNEST(GENERATE_ARRAY(1,1000000)) as num`, ); const reader = await client.createTableReader({ table: { @@ -549,7 +569,9 @@ describe('reader.ReaderClient', () => { describe('Error Scenarios', () => { it('send request with mismatched selected fields', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -580,9 +602,9 @@ describe('reader.ReaderClient', () => { assert.equal(foundError?.code, gax.Status.INVALID_ARGUMENT); assert.equal( foundError?.message.includes( - 'request failed: The following selected fields do not exist in the table schema: wrong_field' + 
'request failed: The following selected fields do not exist in the table schema: wrong_field', ), - true + true, ); reader.close(); @@ -592,7 +614,9 @@ describe('reader.ReaderClient', () => { }); it('should trigger reconnection when intermitent error happens', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -628,7 +652,9 @@ describe('reader.ReaderClient', () => { describe('close', () => { it('should invoke close without errors', async () => { - bqReadClient.initialize(); + bqReadClient.initialize().catch(err => { + throw err; + }); const client = new ReadClient(); client.setClient(bqReadClient); @@ -674,7 +700,7 @@ describe('reader.ReaderClient', () => { async function deleteDatasets() { let [datasets] = await bigquery.getDatasets(); datasets = datasets.filter(dataset => - dataset.id?.includes(GCLOUD_TESTS_PREFIX) + dataset.id?.includes(GCLOUD_TESTS_PREFIX), ); for (const dataset of datasets) { diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index bf0260fd8a6..584b7f8c513 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -63,7 +63,7 @@ describe('Adapt Protos', () => { adapt.convertBigQuerySchemaToStorageTableSchema(schema); const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( storageSchema, - 'Test' + 'Test', ); assert.notEqual(protoDescriptor, null); if (!protoDescriptor) { @@ -102,7 +102,7 @@ describe('Adapt Protos', () => { storageSchema, 'Test', adapt.withChangeType(), - adapt.withChangeSequenceNumber() + adapt.withChangeSequenceNumber(), ); assert.notEqual(protoDescriptor, null); if (!protoDescriptor) { @@ -144,7 +144,7 @@ describe('Adapt Protos', () => { adapt.convertBigQuerySchemaToStorageTableSchema(schema); const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( 
storageSchema, - 'Flexible' + 'Flexible', ); assert.notEqual(protoDescriptor, null); @@ -243,7 +243,7 @@ describe('Adapt Protos', () => { adapt.convertBigQuerySchemaToStorageTableSchema(schema); const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( storageSchema, - 'Nested' + 'Nested', ); assert.notEqual(protoDescriptor, null); if (!protoDescriptor) { @@ -362,7 +362,7 @@ describe('Adapt Protos', () => { adapt.convertBigQuerySchemaToStorageTableSchema(schema); const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( storageSchema, - 'Test' + 'Test', ); assert.notEqual(protoDescriptor, null); if (!protoDescriptor) { @@ -426,7 +426,7 @@ describe('Adapt Protos', () => { }; const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( schema, - 'root' + 'root', ); if (!protoDescriptor) { throw Error('null proto descriptor set'); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 8bfd6ebe5a6..3f3d79d9c3f 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -68,7 +68,7 @@ function stubSimpleCallWithCallback( function stubServerStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? 
sinon.stub().callsArgWith(2, error) @@ -163,7 +163,7 @@ describe('v1.BigQueryReadClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual( servicePath, - 'bigquerystorage.configured.example.com' + 'bigquerystorage.configured.example.com', ); if (saved) { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; @@ -206,7 +206,9 @@ describe('v1.BigQueryReadClient', () => { projectId: 'bogus', }); assert.strictEqual(client.bigQueryReadStub, undefined); - await client.initialize(); + await client.initialize().catch(err => { + throw err; + }); assert(client.bigQueryReadStub); }); @@ -215,11 +217,18 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.bigQueryReadStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.bigQueryReadStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -228,9 +237,14 @@ describe('v1.BigQueryReadClient', () => { projectId: 'bogus', }); assert.strictEqual(client.bigQueryReadStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -274,19 +288,21 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); request.readSession ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] + 
['readSession', 'table'], ); request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadSession() + new protos.google.cloud.bigquery.storage.v1.ReadSession(), ); client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); const [response] = await client.createReadSession(request); @@ -306,19 +322,21 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); request.readSession ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] + ['readSession', 'table'], ); request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadSession() + new protos.google.cloud.bigquery.storage.v1.ReadSession(), ); client.innerApiCalls.createReadSession = stubSimpleCallWithCallback(expectedResponse); @@ -327,14 +345,14 @@ describe('v1.BigQueryReadClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IReadSession | null + result?: protos.google.cloud.bigquery.storage.v1.IReadSession | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -354,21 +372,23 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); request.readSession ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] + ['readSession', 'table'], ); request.readSession.table = defaultValue1; - const expectedHeaderRequestParams = `read_session.table=${defaultValue1}`; + const expectedHeaderRequestParams = `read_session.table=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.createReadSession = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createReadSession(request), expectedError); const actualRequest = ( @@ -386,18 +406,22 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); request.readSession ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateReadSessionRequest', - ['readSession', 'table'] + ['readSession', 'table'], ); request.readSession.table = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.createReadSession(request), expectedError); }); }); @@ -408,18 +432,20 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(), ); client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); const [response] = await client.splitReadStream(request); @@ -439,18 +465,20 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(), ); client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback(expectedResponse); @@ -459,14 +487,14 @@ describe('v1.BigQueryReadClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse | null + result?: protos.google.cloud.bigquery.storage.v1.ISplitReadStreamResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -486,20 +514,22 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.splitReadStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.splitReadStream(request), expectedError); const actualRequest = ( @@ -517,17 +547,21 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.SplitReadStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.splitReadStream(request), expectedError); }); }); @@ -538,18 +572,20 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] + ['readStream'], ); request.readStream = defaultValue1; - const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() + new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse(), ); client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); const stream = client.readRows(request); @@ -557,10 +593,10 @@ describe('v1.BigQueryReadClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -580,20 +616,24 @@ describe('v1.BigQueryReadClient', () => { it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => { const client = new bigqueryreadModule.v1.BigQueryReadClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', gaxServerStreamingRetries: true, }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] + ['readStream'], ); request.readStream = defaultValue1; - const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse() + new protos.google.cloud.bigquery.storage.v1.ReadRowsResponse(), ); client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); const stream = client.readRows(request); @@ -601,10 +641,10 @@ describe('v1.BigQueryReadClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -627,30 +667,32 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] + ['readStream'], ); request.readStream = defaultValue1; - const expectedHeaderRequestParams = `read_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `read_stream=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.readRows = stubServerStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.readRows(request); const promise = new Promise((resolve, reject) => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -672,17 +714,21 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.ReadRowsRequest', - ['readStream'] + ['readStream'], ); request.readStream = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); const stream = client.readRows(request, { retryRequestOptions: {noResponseRetries: 0}, }); @@ -690,10 +736,10 @@ describe('v1.BigQueryReadClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -710,7 +756,7 @@ describe('v1.BigQueryReadClient', () => { }); describe('Path templates', () => { - describe('project', () => { + describe('project', async () => { const fakePath = '/rendered/path/project'; const expectedParameters = { project: 'projectValue', @@ -719,7 +765,9 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, 
projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.projectPathTemplate.render = sinon .stub() .returns(fakePath); @@ -733,7 +781,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.projectPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -743,12 +791,12 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.projectPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('readSession', () => { + describe('readSession', async () => { const fakePath = '/rendered/path/readSession'; const expectedParameters = { project: 'projectValue', @@ -759,7 +807,9 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.readSessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -771,13 +821,13 @@ describe('v1.BigQueryReadClient', () => { const result = client.readSessionPath( 'projectValue', 'locationValue', - 'sessionValue' + 'sessionValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readSessionPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -787,7 +837,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -797,7 +847,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -807,12 +857,12 @@ describe('v1.BigQueryReadClient', () => { assert( 
(client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('readStream', () => { + describe('readStream', async () => { const fakePath = '/rendered/path/readStream'; const expectedParameters = { project: 'projectValue', @@ -824,7 +874,9 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.readStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -837,13 +889,13 @@ describe('v1.BigQueryReadClient', () => { 'projectValue', 'locationValue', 'sessionValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readStreamPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -853,7 +905,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -863,7 +915,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -873,7 +925,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -883,12 +935,12 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('table', () => { + describe('table', async () => { const fakePath = '/rendered/path/table'; const expectedParameters = { project: 'projectValue', @@ -899,7 +951,9 @@ 
describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.tablePathTemplate.render = sinon .stub() .returns(fakePath); @@ -911,13 +965,13 @@ describe('v1.BigQueryReadClient', () => { const result = client.tablePath( 'projectValue', 'datasetValue', - 'tableValue' + 'tableValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.tablePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -927,7 +981,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -937,7 +991,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -947,12 +1001,12 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('writeStream', () => { + describe('writeStream', async () => { const fakePath = '/rendered/path/writeStream'; const expectedParameters = { project: 'projectValue', @@ -964,7 +1018,9 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.writeStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -977,13 +1033,13 @@ describe('v1.BigQueryReadClient', () => { 'projectValue', 'datasetValue', 'tableValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.writeStreamPathTemplate.render as 
SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -993,7 +1049,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1003,7 +1059,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1013,7 +1069,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1023,7 +1079,7 @@ describe('v1.BigQueryReadClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts index 2f03bc17ac7..27cdb803d13 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_storage_v1beta1.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -68,7 +68,7 @@ function stubSimpleCallWithCallback( function stubServerStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? 
sinon.stub().callsArgWith(2, error) @@ -165,7 +165,7 @@ describe('v1beta1.BigQueryStorageClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual( servicePath, - 'bigquerystorage.configured.example.com' + 'bigquerystorage.configured.example.com', ); if (saved) { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; @@ -217,11 +217,18 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.bigQueryStorageStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.bigQueryStorageStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -230,9 +237,14 @@ describe('v1beta1.BigQueryStorageClient', () => { projectId: 'bogus', }); assert.strictEqual(client.bigQueryStorageStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -276,25 +288,25 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(), ); request.tableReference ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] + ['tableReference', 'projectId'], ); request.tableReference.projectId = defaultValue1; request.tableReference ??= {}; const defaultValue2 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - 
['tableReference', 'datasetId'] + ['tableReference', 'datasetId'], ); request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1 ?? ''}&table_reference.dataset_id=${defaultValue2 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession(), ); client.innerApiCalls.createReadSession = stubSimpleCall(expectedResponse); const [response] = await client.createReadSession(request); @@ -314,25 +326,25 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(), ); request.tableReference ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] + ['tableReference', 'projectId'], ); request.tableReference.projectId = defaultValue1; request.tableReference ??= {}; const defaultValue2 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] + ['tableReference', 'datasetId'], ); request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1 ?? ''}&table_reference.dataset_id=${defaultValue2 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadSession() + new protos.google.cloud.bigquery.storage.v1beta1.ReadSession(), ); client.innerApiCalls.createReadSession = stubSimpleCallWithCallback(expectedResponse); @@ -341,14 +353,14 @@ describe('v1beta1.BigQueryStorageClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.IReadSession | null + result?: protos.google.cloud.bigquery.storage.v1beta1.IReadSession | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -368,27 +380,27 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(), ); request.tableReference ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] + ['tableReference', 'projectId'], ); request.tableReference.projectId = defaultValue1; request.tableReference ??= {}; const defaultValue2 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] + ['tableReference', 'datasetId'], ); request.tableReference.datasetId = defaultValue2; - const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1}&table_reference.dataset_id=${defaultValue2}`; + const expectedHeaderRequestParams = `table_reference.project_id=${defaultValue1 ?? ''}&table_reference.dataset_id=${defaultValue2 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.createReadSession = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createReadSession(request), expectedError); const actualRequest = ( @@ -406,24 +418,26 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest() + new protos.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(), ); request.tableReference ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'projectId'] + ['tableReference', 'projectId'], ); request.tableReference.projectId = defaultValue1; request.tableReference ??= {}; const defaultValue2 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest', - ['tableReference', 'datasetId'] + ['tableReference', 'datasetId'], ); request.tableReference.datasetId = defaultValue2; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.createReadSession(request), expectedError); }); }); @@ -434,19 +448,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(), ); request.session ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 
'name'] + ['session', 'name'], ); request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedHeaderRequestParams = `session.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(), ); client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall(expectedResponse); @@ -467,19 +481,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(), ); request.session ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] + ['session', 'name'], ); request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedHeaderRequestParams = `session.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(), ); client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCallWithCallback(expectedResponse); @@ -488,14 +502,14 @@ describe('v1beta1.BigQueryStorageClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse | null + result?: protos.google.cloud.bigquery.storage.v1beta1.IBatchCreateReadSessionStreamsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -515,25 +529,25 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(), ); request.session ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] + ['session', 'name'], ); request.session.name = defaultValue1; - const expectedHeaderRequestParams = `session.name=${defaultValue1}`; + const expectedHeaderRequestParams = `session.name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCreateReadSessionStreams = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.batchCreateReadSessionStreams(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.batchCreateReadSessionStreams as SinonStub @@ -550,21 +564,23 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(), ); request.session ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest', - ['session', 'name'] + ['session', 'name'], ); request.session.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.batchCreateReadSessionStreams(request), - expectedError + expectedError, ); }); }); @@ -575,19 +591,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(), ); request.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] + ['stream', 'name'], ); request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = 
`stream.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.finalizeStream = stubSimpleCall(expectedResponse); const [response] = await client.finalizeStream(request); @@ -607,19 +623,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(), ); request.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] + ['stream', 'name'], ); request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `stream.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.finalizeStream = stubSimpleCallWithCallback(expectedResponse); @@ -628,14 +644,14 @@ describe('v1beta1.BigQueryStorageClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -655,21 +671,21 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(), ); request.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] + ['stream', 'name'], ); request.stream.name = defaultValue1; - const expectedHeaderRequestParams = `stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `stream.name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.finalizeStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.finalizeStream(request), expectedError); const actualRequest = ( @@ -687,18 +703,20 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(), ); request.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest', - ['stream', 'name'] + ['stream', 'name'], ); request.stream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.finalizeStream(request), expectedError); }); }); @@ -709,19 +727,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(), ); request.originalStream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] + ['originalStream', 'name'], ); request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(), ); client.innerApiCalls.splitReadStream = stubSimpleCall(expectedResponse); const [response] = await client.splitReadStream(request); @@ -741,19 +759,19 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(), ); request.originalStream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] + ['originalStream', 'name'], ); request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(), ); client.innerApiCalls.splitReadStream = stubSimpleCallWithCallback(expectedResponse); @@ -762,14 +780,14 @@ describe('v1beta1.BigQueryStorageClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse | null + result?: protos.google.cloud.bigquery.storage.v1beta1.ISplitReadStreamResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -789,21 +807,21 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(), ); request.originalStream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] + ['originalStream', 'name'], ); request.originalStream.name = defaultValue1; - const expectedHeaderRequestParams = `original_stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `original_stream.name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.splitReadStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.splitReadStream(request), expectedError); const actualRequest = ( @@ -821,18 +839,20 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest() + new protos.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(), ); request.originalStream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest', - ['originalStream', 'name'] + ['originalStream', 'name'], ); request.originalStream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.splitReadStream(request), expectedError); }); }); @@ -843,20 +863,20 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(), ); request.readPosition ??= {}; request.readPosition.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] + ['readPosition', 'stream', 'name'], ); request.readPosition.stream.name = defaultValue1; - const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(), ); client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); const stream = client.readRows(request); @@ -864,10 +884,10 @@ describe('v1beta1.BigQueryStorageClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -887,22 +907,24 @@ describe('v1beta1.BigQueryStorageClient', () => { it('invokes readRows without error and gaxServerStreamingRetries enabled', async () => { const client = new bigquerystorageModule.v1beta1.BigQueryStorageClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', gaxServerStreamingRetries: true, }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(), ); request.readPosition ??= {}; request.readPosition.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] + ['readPosition', 'stream', 'name'], ); request.readPosition.stream.name = defaultValue1; - const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(), ); client.innerApiCalls.readRows = stubServerStreamingCall(expectedResponse); const stream = client.readRows(request); @@ -910,10 +932,10 @@ describe('v1beta1.BigQueryStorageClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -936,32 +958,32 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(), ); request.readPosition ??= {}; request.readPosition.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] + ['readPosition', 'stream', 'name'], ); request.readPosition.stream.name = defaultValue1; - const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1}`; + const expectedHeaderRequestParams = `read_position.stream.name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.readRows = stubServerStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.readRows(request); const promise = new Promise((resolve, reject) => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -983,19 +1005,21 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest() + new protos.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(), ); request.readPosition ??= {}; request.readPosition.stream ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest', - ['readPosition', 'stream', 'name'] + ['readPosition', 'stream', 'name'], ); request.readPosition.stream.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); const stream = client.readRows(request, { retryRequestOptions: {noResponseRetries: 0}, }); @@ -1003,10 +1027,10 @@ describe('v1beta1.BigQueryStorageClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse + response: protos.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -1023,7 +1047,7 @@ describe('v1beta1.BigQueryStorageClient', () => { }); describe('Path templates', () => { - describe('project', () => { + describe('project', async () => { const fakePath = '/rendered/path/project'; const expectedParameters = 
{ project: 'projectValue', @@ -1032,7 +1056,7 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.projectPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1046,7 +1070,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.projectPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1056,12 +1080,12 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.projectPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('readSession', () => { + describe('readSession', async () => { const fakePath = '/rendered/path/readSession'; const expectedParameters = { project: 'projectValue', @@ -1072,7 +1096,7 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.readSessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1084,13 +1108,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const result = client.readSessionPath( 'projectValue', 'locationValue', - 'sessionValue' + 'sessionValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readSessionPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1100,7 +1124,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1110,7 +1134,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) 
- .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1120,12 +1144,12 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('stream', () => { + describe('stream', async () => { const fakePath = '/rendered/path/stream'; const expectedParameters = { project: 'projectValue', @@ -1136,7 +1160,7 @@ describe('v1beta1.BigQueryStorageClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.streamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1148,13 +1172,13 @@ describe('v1beta1.BigQueryStorageClient', () => { const result = client.streamPath( 'projectValue', 'locationValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.streamPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1164,7 +1188,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.streamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1174,7 +1198,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.streamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1184,7 +1208,7 @@ describe('v1beta1.BigQueryStorageClient', () => { assert( (client.pathTemplates.streamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts index 9d4f4765225..7c78a4d3b30 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts +++ 
b/handwritten/bigquery-storage/test/gapic_big_query_write_v1.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -68,7 +68,7 @@ function stubSimpleCallWithCallback( function stubBidiStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? 
sinon.stub().callsArgWith(2, error) @@ -156,7 +156,7 @@ describe('v1.BigQueryWriteClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual( servicePath, - 'bigquerystorage.configured.example.com' + 'bigquerystorage.configured.example.com', ); if (saved) { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; @@ -208,11 +208,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.bigQueryWriteStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.bigQueryWriteStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -221,9 +228,14 @@ describe('v1.BigQueryWriteClient', () => { projectId: 'bogus', }); assert.strictEqual(client.bigQueryWriteStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -267,18 +279,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() + new protos.google.cloud.bigquery.storage.v1.WriteStream(), ); client.innerApiCalls.createWriteStream = stubSimpleCall(expectedResponse); const [response] = await client.createWriteStream(request); @@ -298,18 +310,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() + new protos.google.cloud.bigquery.storage.v1.WriteStream(), ); client.innerApiCalls.createWriteStream = stubSimpleCallWithCallback(expectedResponse); @@ -318,14 +330,14 @@ describe('v1.BigQueryWriteClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -345,20 +357,20 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(), 
); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.createWriteStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createWriteStream(request), expectedError); const actualRequest = ( @@ -376,17 +388,19 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.createWriteStream(request), expectedError); }); }); @@ -397,18 +411,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() + new protos.google.cloud.bigquery.storage.v1.WriteStream(), ); client.innerApiCalls.getWriteStream = stubSimpleCall(expectedResponse); const [response] = await client.getWriteStream(request); @@ -428,18 +442,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.WriteStream() + new protos.google.cloud.bigquery.storage.v1.WriteStream(), ); client.innerApiCalls.getWriteStream = stubSimpleCallWithCallback(expectedResponse); @@ -448,14 +462,14 @@ describe('v1.BigQueryWriteClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null + result?: protos.google.cloud.bigquery.storage.v1.IWriteStream | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -475,20 +489,20 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(), ); const defaultValue1 = 
getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getWriteStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getWriteStream(request), expectedError); const actualRequest = ( @@ -506,17 +520,19 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.GetWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getWriteStream(request), expectedError); }); }); @@ -527,18 +543,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(), ); client.innerApiCalls.finalizeWriteStream = stubSimpleCall(expectedResponse); @@ -559,18 +575,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse() + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(), ); client.innerApiCalls.finalizeWriteStream = stubSimpleCallWithCallback(expectedResponse); @@ -579,14 +595,14 @@ describe('v1.BigQueryWriteClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse | null + result?: protos.google.cloud.bigquery.storage.v1.IFinalizeWriteStreamResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -606,20 +622,20 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + new 
protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; - const expectedHeaderRequestParams = `name=${defaultValue1}`; + const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.finalizeWriteStream = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.finalizeWriteStream(request), expectedError); const actualRequest = ( @@ -637,17 +653,19 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest() + new protos.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.finalizeWriteStream(request), expectedError); }); }); @@ -658,18 +676,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const 
expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(), ); client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall(expectedResponse); @@ -690,18 +708,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(), ); client.innerApiCalls.batchCommitWriteStreams = stubSimpleCallWithCallback(expectedResponse); @@ -710,14 +728,14 @@ describe('v1.BigQueryWriteClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse | null + result?: protos.google.cloud.bigquery.storage.v1.IBatchCommitWriteStreamsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -737,24 +755,24 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCommitWriteStreams = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.batchCommitWriteStreams(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.batchCommitWriteStreams as SinonStub @@ -771,20 +789,22 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest() + new protos.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.batchCommitWriteStreams(request), - expectedError + expectedError, ); }); }); @@ -795,18 +815,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] + ['writeStream'], ); request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `write_stream=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse(), ); client.innerApiCalls.flushRows = stubSimpleCall(expectedResponse); const [response] = await client.flushRows(request); @@ -826,18 +846,18 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] + ['writeStream'], ); request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `write_stream=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse() + new protos.google.cloud.bigquery.storage.v1.FlushRowsResponse(), ); client.innerApiCalls.flushRows = stubSimpleCallWithCallback(expectedResponse); @@ -846,14 +866,14 @@ describe('v1.BigQueryWriteClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse | null + result?: protos.google.cloud.bigquery.storage.v1.IFlushRowsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -873,16 +893,16 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest(), ); 
const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] + ['writeStream'], ); request.writeStream = defaultValue1; - const expectedHeaderRequestParams = `write_stream=${defaultValue1}`; + const expectedHeaderRequestParams = `write_stream=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.flushRows = stubSimpleCall(undefined, expectedError); await assert.rejects(client.flushRows(request), expectedError); @@ -901,17 +921,19 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest() + new protos.google.cloud.bigquery.storage.v1.FlushRowsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1.FlushRowsRequest', - ['writeStream'] + ['writeStream'], ); request.writeStream = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.flushRows(request), expectedError); }); }); @@ -922,13 +944,13 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.AppendRowsResponse() + new protos.google.cloud.bigquery.storage.v1.AppendRowsResponse(), ); client.innerApiCalls.appendRows = stubBidiStreamingCall(expectedResponse); const stream = client.appendRows(); @@ -936,10 +958,10 @@ describe('v1.BigQueryWriteClient', () 
=> { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -952,12 +974,12 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.innerApiCalls.appendRows as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); @@ -966,24 +988,24 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest() + new protos.google.cloud.bigquery.storage.v1.AppendRowsRequest(), ); const expectedError = new Error('expected'); client.innerApiCalls.appendRows = stubBidiStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.appendRows(); const promise = new Promise((resolve, reject) => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse + response: protos.google.cloud.bigquery.storage.v1.AppendRowsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -995,18 +1017,18 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.innerApiCalls.appendRows as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); }); describe('Path templates', () => { - describe('project', () => { + describe('project', async () => { const fakePath = '/rendered/path/project'; const expectedParameters = { project: 'projectValue', @@ -1015,7 +1037,7 @@ 
describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.projectPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1029,7 +1051,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.projectPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1039,12 +1061,12 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.projectPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('readSession', () => { + describe('readSession', async () => { const fakePath = '/rendered/path/readSession'; const expectedParameters = { project: 'projectValue', @@ -1055,7 +1077,7 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.readSessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1067,13 +1089,13 @@ describe('v1.BigQueryWriteClient', () => { const result = client.readSessionPath( 'projectValue', 'locationValue', - 'sessionValue' + 'sessionValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readSessionPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1083,7 +1105,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1093,7 +1115,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1103,12 +1125,12 @@ 
describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readSessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('readStream', () => { + describe('readStream', async () => { const fakePath = '/rendered/path/readStream'; const expectedParameters = { project: 'projectValue', @@ -1120,7 +1142,7 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.readStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1133,13 +1155,13 @@ describe('v1.BigQueryWriteClient', () => { 'projectValue', 'locationValue', 'sessionValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readStreamPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1149,7 +1171,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1159,7 +1181,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1169,7 +1191,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1179,12 +1201,12 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('table', () => { + describe('table', async () => { const fakePath = '/rendered/path/table'; const expectedParameters = { project: 'projectValue', @@ 
-1195,7 +1217,7 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.tablePathTemplate.render = sinon .stub() .returns(fakePath); @@ -1207,13 +1229,13 @@ describe('v1.BigQueryWriteClient', () => { const result = client.tablePath( 'projectValue', 'datasetValue', - 'tableValue' + 'tableValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.tablePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1223,7 +1245,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1233,7 +1255,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1243,12 +1265,12 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('writeStream', () => { + describe('writeStream', async () => { const fakePath = '/rendered/path/writeStream'; const expectedParameters = { project: 'projectValue', @@ -1260,7 +1282,7 @@ describe('v1.BigQueryWriteClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.writeStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1273,13 +1295,13 @@ describe('v1.BigQueryWriteClient', () => { 'projectValue', 'datasetValue', 'tableValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.writeStreamPathTemplate.render as SinonStub) .getCall(-1) - 
.calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1289,7 +1311,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1299,7 +1321,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1309,7 +1331,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1319,7 +1341,7 @@ describe('v1.BigQueryWriteClient', () => { assert( (client.pathTemplates.writeStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts index 925501688bd..cf8ad078d22 100644 --- a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts +++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -68,7 +68,7 @@ function stubSimpleCallWithCallback( function stubBidiStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? 
sinon.stub().callsArgWith(2, error) @@ -123,7 +123,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { it('sets apiEndpoint according to universe domain camelCase', () => { const client = new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( - {universeDomain: 'example.com'} + {universeDomain: 'example.com'}, ); const servicePath = client.apiEndpoint; assert.strictEqual(servicePath, 'bigquerystorage.example.com'); @@ -132,7 +132,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { it('sets apiEndpoint according to universe domain snakeCase', () => { const client = new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( - {universe_domain: 'example.com'} + {universe_domain: 'example.com'}, ); const servicePath = client.apiEndpoint; assert.strictEqual(servicePath, 'bigquerystorage.example.com'); @@ -159,12 +159,12 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; const client = new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( - {universeDomain: 'configured.example.com'} + {universeDomain: 'configured.example.com'}, ); const servicePath = client.apiEndpoint; assert.strictEqual( servicePath, - 'bigquerystorage.configured.example.com' + 'bigquerystorage.configured.example.com', ); if (saved) { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; @@ -177,7 +177,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { it('does not allow setting both universeDomain and universe_domain', () => { assert.throws(() => { new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( - {universe_domain: 'example.com', universeDomain: 'example.net'} + {universe_domain: 'example.com', universeDomain: 'example.net'}, ); }); }); @@ -201,7 +201,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { new metastorepartitionserviceModule.v1alpha.MetastorePartitionServiceClient( { 
fallback: true, - } + }, ); assert(client); }); @@ -212,10 +212,12 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); assert.strictEqual(client.metastorePartitionServiceStub, undefined); - await client.initialize(); + await client.initialize().catch(err => { + throw err; + }); assert(client.metastorePartitionServiceStub); }); @@ -225,13 +227,20 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); - assert(client.metastorePartitionServiceStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.metastorePartitionServiceStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -240,12 +249,17 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); assert.strictEqual(client.metastorePartitionServiceStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -255,7 +269,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); const result = await client.getProjectId(); @@ -270,7 +284,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); client.auth.getProjectId = sinon .stub() @@ -296,20 +310,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { 
credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse(), ); client.innerApiCalls.batchCreateMetastorePartitions = stubSimpleCall(expectedResponse); @@ -331,20 +347,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse(), ); client.innerApiCalls.batchCreateMetastorePartitions = stubSimpleCallWithCallback(expectedResponse); @@ -353,14 +371,14 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse | null + result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchCreateMetastorePartitionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -381,26 +399,28 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCreateMetastorePartitions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.batchCreateMetastorePartitions(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.batchCreateMetastorePartitions as SinonStub @@ -418,22 +438,26 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.batchCreateMetastorePartitions(request), - expectedError + expectedError, ); }); }); @@ -445,20 +469,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = 
`parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.batchDeleteMetastorePartitions = stubSimpleCall(expectedResponse); @@ -480,20 +506,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.batchDeleteMetastorePartitions = stubSimpleCallWithCallback(expectedResponse); @@ -502,14 +530,14 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -530,26 +558,28 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchDeleteMetastorePartitions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.batchDeleteMetastorePartitions(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub @@ -567,22 +597,26 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.batchDeleteMetastorePartitions(request), - expectedError + expectedError, ); }); }); @@ -594,20 +628,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = 
`parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse(), ); client.innerApiCalls.batchUpdateMetastorePartitions = stubSimpleCall(expectedResponse); @@ -629,20 +665,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse(), ); client.innerApiCalls.batchUpdateMetastorePartitions = stubSimpleCallWithCallback(expectedResponse); @@ -651,14 +689,14 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse | null + result?: protos.google.cloud.bigquery.storage.v1alpha.IBatchUpdateMetastorePartitionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -679,26 +717,28 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchUpdateMetastorePartitions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.batchUpdateMetastorePartitions(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub @@ -716,22 +756,26 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.batchUpdateMetastorePartitions(request), - expectedError + expectedError, ); }); }); @@ -743,20 +787,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = 
`parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse(), ); client.innerApiCalls.listMetastorePartitions = stubSimpleCall(expectedResponse); @@ -778,20 +824,22 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse(), ); client.innerApiCalls.listMetastorePartitions = stubSimpleCallWithCallback(expectedResponse); @@ -800,14 +848,14 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { request, ( err?: Error | null, - result?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse | null + result?: protos.google.cloud.bigquery.storage.v1alpha.IListMetastorePartitionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -828,26 +876,28 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; - const expectedHeaderRequestParams = `parent=${defaultValue1}`; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listMetastorePartitions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.listMetastorePartitions(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.listMetastorePartitions as SinonStub @@ -865,22 +915,26 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.listMetastorePartitions(request), - expectedError + expectedError, ); }); }); @@ -892,15 +946,17 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse() + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse(), ); client.innerApiCalls.streamMetastorePartitions = 
stubBidiStreamingCall(expectedResponse); @@ -909,10 +965,10 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -925,12 +981,12 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.innerApiCalls.streamMetastorePartitions as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); @@ -940,26 +996,28 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); const request = generateSampleMessage( - new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest() + new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(), ); const expectedError = new Error('expected'); client.innerApiCalls.streamMetastorePartitions = stubBidiStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.streamMetastorePartitions(); const promise = new Promise((resolve, reject) => { stream.on( 'data', ( - response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse + response: protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -971,18 +1029,18 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.innerApiCalls.streamMetastorePartitions as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); 
assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); }); describe('Path templates', () => { - describe('readStream', () => { + describe('readStream', async () => { const fakePath = '/rendered/path/readStream'; const expectedParameters = { project: 'projectValue', @@ -995,9 +1053,11 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.readStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1010,13 +1070,13 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { 'projectValue', 'locationValue', 'sessionValue', - 'streamValue' + 'streamValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.readStreamPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1026,7 +1086,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1036,7 +1096,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1046,7 +1106,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1056,12 +1116,12 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.readStreamPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('table', () => { + 
describe('table', async () => { const fakePath = '/rendered/path/table'; const expectedParameters = { project: 'projectValue', @@ -1073,9 +1133,11 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', - } + }, ); - client.initialize(); + await client.initialize().catch(err => { + throw err; + }); client.pathTemplates.tablePathTemplate.render = sinon .stub() .returns(fakePath); @@ -1087,13 +1149,13 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { const result = client.tablePath( 'projectValue', 'datasetValue', - 'tableValue' + 'tableValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.tablePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -1103,7 +1165,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1113,7 +1175,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -1123,7 +1185,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { assert( (client.pathTemplates.tablePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json index c78f1c884ef..ca73e7bfc82 100644 --- a/handwritten/bigquery-storage/tsconfig.json +++ b/handwritten/bigquery-storage/tsconfig.json @@ -5,7 +5,7 @@ "outDir": "build", "resolveJsonModule": true, "lib": [ - "es2018", + "es2023", "dom" ] }, @@ -14,6 +14,9 @@ "src/**/*.ts", "test/*.ts", "test/**/*.ts", - "system-test/*.ts" + "system-test/*.ts", + "src/**/*.json", + 
"samples/**/*.json", + "protos/protos.json" ] } From 9ad2fd50e810623020ee0fbef48a572985c4930a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 4 Apr 2025 16:45:57 +0200 Subject: [PATCH 300/333] chore(deps): update dependency @octokit/rest to v21 (#546) --- handwritten/bigquery-storage/.github/scripts/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/scripts/package.json b/handwritten/bigquery-storage/.github/scripts/package.json index 2c2e5207df9..baad0f8f857 100644 --- a/handwritten/bigquery-storage/.github/scripts/package.json +++ b/handwritten/bigquery-storage/.github/scripts/package.json @@ -14,7 +14,7 @@ "js-yaml": "^4.1.0" }, "devDependencies": { - "@octokit/rest": "^19.0.0", + "@octokit/rest": "^21.0.0", "mocha": "^10.0.0", "sinon": "^18.0.0" } From f41a49d50cf35f64620a46206a71cede1b715971 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 08:01:45 -0400 Subject: [PATCH 301/333] chore: update owlbot-nodejs dependencies (#547) * chore: update owlbot-nodejs dependencies * Update container_test.yaml Source-Link: https://github.com/googleapis/synthtool/commit/1e798e6de27c63a88a1768c2a5f73b85e1523a21 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 3 ++- handwritten/bigquery-storage/.github/scripts/package.json | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index d21b9dd5db6..60443342360 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:7c7ecb30ffac77ebdacd15f4b1c6c888f01c212832b9efd73fbf4bfc1284b7d4 + digest: sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee +# created: 2025-04-08T17:33:08.498793944Z diff --git a/handwritten/bigquery-storage/.github/scripts/package.json b/handwritten/bigquery-storage/.github/scripts/package.json index baad0f8f857..2c2e5207df9 100644 --- a/handwritten/bigquery-storage/.github/scripts/package.json +++ b/handwritten/bigquery-storage/.github/scripts/package.json @@ -14,7 +14,7 @@ "js-yaml": "^4.1.0" }, "devDependencies": { - "@octokit/rest": "^21.0.0", + "@octokit/rest": "^19.0.0", "mocha": "^10.0.0", "sinon": "^18.0.0" } From e8fdfaca2fd913fa5084e81634a8d7cf416d3102 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 8 May 2025 22:24:05 +0200 Subject: [PATCH 302/333] fix(deps): update dependency @google-cloud/bigquery to v8 (#550) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 8068c8f8abc..94002c18ac4 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -38,7 +38,7 @@ "protobufjs": "^7.2.4" }, "devDependencies": { - "@google-cloud/bigquery": "^7.9.3", + "@google-cloud/bigquery": "^8.0.0", "@types/extend": "^3.0.4", "@types/mocha": "^10.0.10", "@types/node": "^22.13.14", From c29e6705eec8166f23a22c4e2f8cc00961d016c5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 11:30:14 -0400 Subject: [PATCH 303/333] chore(main): release 5.0.0 (#545) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 5.0.0 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- handwritten/bigquery-storage/CHANGELOG.md | 16 + handwritten/bigquery-storage/package.json | 2 +- handwritten/bigquery-storage/protos/protos.js | 580 +++++++++++++----- .../bigquery-storage/protos/protos.json | 57 +- ...data.google.cloud.bigquery.storage.v1.json | 2 +- ...data_google.cloud.bigquery.storage.v1.json | 2 +- ...google.cloud.bigquery.storage.v1alpha.json | 2 +- ...google.cloud.bigquery.storage.v1beta1.json | 2 +- ...google.cloud.bigquery.storage.v1beta1.json | 2 +- 9 files changed, 496 insertions(+), 169 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index d3328ad146c..9a35776e987 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [5.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.11.0...v5.0.0) (2025-05-08) + + +### ⚠ BREAKING CHANGES + +* upgrade to node 18 ([#542](https://github.com/googleapis/nodejs-bigquery-storage/issues/542)) + +### Bug Fixes + +* **deps:** Update dependency @google-cloud/bigquery to v8 ([#550](https://github.com/googleapis/nodejs-bigquery-storage/issues/550)) ([05df963](https://github.com/googleapis/nodejs-bigquery-storage/commit/05df963172b8f0fe5ec71e1bebfad6e065c9aa1b)) + + +### Miscellaneous Chores + +* Upgrade to node 18 ([#542](https://github.com/googleapis/nodejs-bigquery-storage/issues/542)) ([b3eb245](https://github.com/googleapis/nodejs-bigquery-storage/commit/b3eb245dbd1a32b8f49497bc8d3d487422b853ec)) + ## [4.11.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.10.1...v4.11.0) (2025-01-09) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 94002c18ac4..7813816dc24 
100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "4.11.0", + "version": "5.0.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index a511324a5da..5058f54c6c2 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -160,12 +160,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowSchema.decode = function decode(reader, length) { + ArrowSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedSchema = reader.bytes(); @@ -383,12 +385,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowRecordBatch.decode = function decode(reader, length) { + ArrowRecordBatch.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowRecordBatch(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedRecordBatch = reader.bytes(); @@ -622,12 +626,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowSerializationOptions.decode = function decode(reader, length) { + ArrowSerializationOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { message.bufferCompression = reader.int32(); @@ -865,12 +871,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroSchema.decode = function decode(reader, length) { + AvroSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.schema = reader.string(); @@ -1079,12 +1087,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroRows.decode = function decode(reader, length) { + AvroRows.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroRows(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedBinaryRows = reader.bytes(); @@ -1318,12 +1328,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroSerializationOptions.decode = function decode(reader, length) { + AvroSerializationOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.enableDisplayNameAttribute = reader.bool(); @@ -1521,12 +1533,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ProtoSchema.decode = function decode(reader, length) { + ProtoSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.protoDescriptor = $root.google.protobuf.DescriptorProto.decode(reader, reader.uint32()); @@ -1731,12 +1745,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ProtoRows.decode = function decode(reader, length) { + ProtoRows.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ProtoRows(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.serializedRows && message.serializedRows.length)) @@ -2351,12 +2367,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateReadSessionRequest.decode = function decode(reader, length) { + CreateReadSessionRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -2607,12 +2625,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsRequest.decode = function decode(reader, length) { + ReadRowsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.readStream = reader.string(); @@ -2837,12 +2857,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ThrottleState.decode = function decode(reader, length) { + ThrottleState.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ThrottleState(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.throttlePercent = reader.int32(); @@ -3040,12 +3062,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamStats.decode = function decode(reader, length) { + StreamStats.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { message.progress = $root.google.cloud.bigquery.storage.v1.StreamStats.Progress.decode(reader, reader.uint32()); @@ -3256,12 +3280,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Progress.decode = function decode(reader, length) { + Progress.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StreamStats.Progress(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.atResponseStart = reader.double(); @@ -3583,12 +3609,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsResponse.decode = function decode(reader, length) { + ReadRowsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadRowsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 3: { message.avroRows = $root.google.cloud.bigquery.storage.v1.AvroRows.decode(reader, reader.uint32()); @@ -3965,12 +3993,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamRequest.decode = function decode(reader, length) { + SplitReadStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -4192,12 +4222,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamResponse.decode = function decode(reader, length) { + SplitReadStreamResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.SplitReadStreamResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.primaryStream = $root.google.cloud.bigquery.storage.v1.ReadStream.decode(reader, reader.uint32()); @@ -4429,12 +4461,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateWriteStreamRequest.decode = function decode(reader, length) { + CreateWriteStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -4732,12 +4766,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AppendRowsRequest.decode = function decode(reader, length) { + AppendRowsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest(), key, value; while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.writeStream = reader.string(); @@ -5127,12 +5163,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowData.decode = function decode(reader, length) { + ArrowData.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowData(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.writerSchema = $root.google.cloud.bigquery.storage.v1.ArrowSchema.decode(reader, reader.uint32()); @@ -5364,12 +5402,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ProtoData.decode = function decode(reader, length) { + ProtoData.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.writerSchema = $root.google.cloud.bigquery.storage.v1.ProtoSchema.decode(reader, reader.uint32()); @@ -5669,12 +5709,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AppendRowsResponse.decode = function decode(reader, length) { + AppendRowsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.appendResult = $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult.decode(reader, reader.uint32()); @@ -5966,12 +6008,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AppendResult.decode = function decode(reader, length) { + AppendResult.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.offset = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); @@ -6188,12 +6232,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - GetWriteStreamRequest.decode = function decode(reader, length) { + GetWriteStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.GetWriteStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -6441,12 +6487,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCommitWriteStreamsRequest.decode = function decode(reader, length) { + BatchCommitWriteStreamsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -6684,12 +6732,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCommitWriteStreamsResponse.decode = function decode(reader, length) { + BatchCommitWriteStreamsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.commitTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); @@ -6924,12 +6974,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FinalizeWriteStreamRequest.decode = function decode(reader, length) { + FinalizeWriteStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -7127,12 +7179,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FinalizeWriteStreamResponse.decode = function decode(reader, length) { + FinalizeWriteStreamResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.rowCount = reader.int64(); @@ -7355,12 +7409,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FlushRowsRequest.decode = function decode(reader, length) { + FlushRowsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.writeStream = reader.string(); @@ -7576,12 +7632,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FlushRowsResponse.decode = function decode(reader, length) { + FlushRowsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.FlushRowsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.offset = reader.int64(); @@ -7815,12 +7873,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StorageError.decode = function decode(reader, length) { + StorageError.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.StorageError(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.code = reader.int32(); @@ -8189,12 +8249,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - RowError.decode = function decode(reader, length) { + RowError.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.RowError(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.index = reader.int64(); @@ -8628,12 +8690,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.decode = function decode(reader, length) { + ReadSession.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -9097,12 +9161,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableModifiers.decode = function decode(reader, length) { + TableModifiers.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableModifiers(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); @@ -9388,12 +9454,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decode = function decode(reader, length) { + TableReadOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.selectedFields && message.selectedFields.length)) @@ -9733,12 +9801,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadStream.decode = function decode(reader, length) { + ReadStream.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.ReadStream(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -10018,12 +10088,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - WriteStream.decode = function decode(reader, length) { + WriteStream.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.WriteStream(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -10391,12 +10463,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableSchema.decode = function decode(reader, length) { + TableSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.fields && message.fields.length)) @@ -10714,12 +10788,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableFieldSchema.decode = function decode(reader, length) { + TableFieldSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -11275,12 +11351,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldElementType.decode = function decode(reader, length) { + FieldElementType.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.type = reader.int32(); @@ -11798,12 +11876,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateMetastorePartitionRequest.decode = function decode(reader, length) { + CreateMetastorePartitionRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -12043,12 +12123,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateMetastorePartitionsRequest.decode = function decode(reader, length) { + BatchCreateMetastorePartitionsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -12293,12 +12375,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateMetastorePartitionsResponse.decode = function decode(reader, length) { + BatchCreateMetastorePartitionsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.partitions && message.partitions.length)) @@ -12528,12 +12612,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchDeleteMetastorePartitionsRequest.decode = function decode(reader, length) { + BatchDeleteMetastorePartitionsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -12774,12 +12860,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UpdateMetastorePartitionRequest.decode = function decode(reader, length) { + UpdateMetastorePartitionRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.metastorePartition = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition.decode(reader, reader.uint32()); @@ -13013,12 +13101,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchUpdateMetastorePartitionsRequest.decode = function decode(reader, length) { + BatchUpdateMetastorePartitionsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -13250,12 +13340,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchUpdateMetastorePartitionsResponse.decode = function decode(reader, length) { + BatchUpdateMetastorePartitionsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.partitions && message.partitions.length)) @@ -13483,12 +13575,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ListMetastorePartitionsRequest.decode = function decode(reader, length) { + ListMetastorePartitionsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -13724,12 +13818,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ListMetastorePartitionsResponse.decode = function decode(reader, length) { + ListMetastorePartitionsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.partitions = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList.decode(reader, reader.uint32()); @@ -13985,12 +14081,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamMetastorePartitionsRequest.decode = function decode(reader, length) { + StreamMetastorePartitionsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.parent = reader.string(); @@ -14244,12 +14342,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamMetastorePartitionsResponse.decode = function decode(reader, length) { + StreamMetastorePartitionsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { message.totalPartitionsStreamedCount = reader.int64(); @@ -14499,12 +14599,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchSizeTooLargeError.decode = function decode(reader, length) { + BatchSizeTooLargeError.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.BatchSizeTooLargeError(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.maxBatchSize = reader.int64(); @@ -14740,12 +14842,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldSchema.decode = function decode(reader, length) { + FieldSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.FieldSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -14989,12 +15093,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StorageDescriptor.decode = function decode(reader, length) { + StorageDescriptor.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StorageDescriptor(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.locationUri = reader.string(); @@ -15258,12 +15364,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SerDeInfo.decode = function decode(reader, length) { + SerDeInfo.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.SerDeInfo(), key, value; while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -15570,12 +15678,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MetastorePartition.decode = function decode(reader, length) { + MetastorePartition.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartition(), key, value; while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.values && message.values.length)) @@ -15903,12 +16013,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MetastorePartitionList.decode = function decode(reader, length) { + MetastorePartitionList.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionList(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.partitions && message.partitions.length)) @@ -16125,12 +16237,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadStream.decode = function decode(reader, length) { + ReadStream.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.ReadStream(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -16330,12 +16444,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamList.decode = function decode(reader, length) { + StreamList.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.StreamList(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.streams && message.streams.length)) @@ -16554,12 +16670,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MetastorePartitionValues.decode = function decode(reader, length) { + MetastorePartitionValues.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.values && message.values.length)) @@ -16783,12 +16901,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowSchema.decode = function decode(reader, length) { + ArrowSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedSchema = reader.bytes(); @@ -17006,12 +17126,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ArrowRecordBatch.decode = function decode(reader, length) { + ArrowRecordBatch.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatch(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedRecordBatch = reader.bytes(); @@ -17245,12 +17367,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroSchema.decode = function decode(reader, length) { + AvroSchema.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroSchema(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.schema = reader.string(); @@ -17459,12 +17583,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - AvroRows.decode = function decode(reader, length) { + AvroRows.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.AvroRows(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.serializedBinaryRows = reader.bytes(); @@ -17711,12 +17837,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReadOptions.decode = function decode(reader, length) { + TableReadOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReadOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.selectedFields && message.selectedFields.length)) @@ -18141,12 +18269,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Stream.decode = function decode(reader, length) { + Stream.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Stream(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -18355,12 +18485,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamPosition.decode = function decode(reader, length) { + StreamPosition.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamPosition(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); @@ -18683,12 +18815,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadSession.decode = function decode(reader, length) { + ReadSession.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadSession(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -19151,12 +19285,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CreateReadSessionRequest.decode = function decode(reader, length) { + CreateReadSessionRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.tableReference = $root.google.cloud.bigquery.storage.v1beta1.TableReference.decode(reader, reader.uint32()); @@ -19490,12 +19626,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsRequest.decode = function decode(reader, length) { + ReadRowsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.readPosition = $root.google.cloud.bigquery.storage.v1beta1.StreamPosition.decode(reader, reader.uint32()); @@ -19731,12 +19869,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StreamStatus.decode = function decode(reader, length) { + StreamStatus.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.StreamStatus(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.estimatedRowCount = reader.int64(); @@ -20001,12 +20141,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Progress.decode = function decode(reader, length) { + Progress.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.Progress(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.atResponseStart = reader.float(); @@ -20217,12 +20359,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ThrottleStatus.decode = function decode(reader, length) { + ThrottleStatus.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ThrottleStatus(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.throttlePercent = reader.int32(); @@ -20511,12 +20655,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReadRowsResponse.decode = function decode(reader, length) { + ReadRowsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 3: { message.avroRows = $root.google.cloud.bigquery.storage.v1beta1.AvroRows.decode(reader, reader.uint32()); @@ -20867,12 +21013,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length) { + BatchCreateReadSessionStreamsRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.session = $root.google.cloud.bigquery.storage.v1beta1.ReadSession.decode(reader, reader.uint32()); @@ -21090,12 +21238,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length) { + BatchCreateReadSessionStreamsResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.streams && message.streams.length)) @@ -21312,12 +21462,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FinalizeStreamRequest.decode = function decode(reader, length) { + FinalizeStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { message.stream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); @@ -21531,12 +21683,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamRequest.decode = function decode(reader, length) { + SplitReadStreamRequest.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.originalStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); @@ -21763,12 +21917,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SplitReadStreamResponse.decode = function decode(reader, length) { + SplitReadStreamResponse.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.primaryStream = $root.google.cloud.bigquery.storage.v1beta1.Stream.decode(reader, reader.uint32()); @@ -22011,12 +22167,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableReference.decode = function decode(reader, length) { + TableReference.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableReference(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.projectId = reader.string(); @@ -22239,12 +22397,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - TableModifiers.decode = function decode(reader, length) { + TableModifiers.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta1.TableModifiers(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.snapshotTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); @@ -22470,12 +22630,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorSet.decode = function decode(reader, length) { + FileDescriptorSet.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorSet(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.file && message.file.length)) @@ -22870,12 +23032,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileDescriptorProto.decode = function decode(reader, length) { + FileDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -23537,12 +23701,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DescriptorProto.decode = function decode(reader, length) { + DescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -24022,12 +24188,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ExtensionRange.decode = function decode(reader, length) { + ExtensionRange.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ExtensionRange(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.start = reader.int32(); @@ -24266,12 +24434,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ReservedRange.decode = function decode(reader, length) { + ReservedRange.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DescriptorProto.ReservedRange(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.start = reader.int32(); @@ -24522,12 +24692,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ExtensionRangeOptions.decode = function decode(reader, length) { + ExtensionRangeOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) @@ -24867,12 +25039,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Declaration.decode = function decode(reader, length) { + Declaration.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.ExtensionRangeOptions.Declaration(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.number = reader.int32(); @@ -25246,12 +25420,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldDescriptorProto.decode = function decode(reader, length) { + FieldDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -25771,12 +25947,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - OneofDescriptorProto.decode = function decode(reader, length) { + OneofDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.OneofDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -26042,12 +26220,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumDescriptorProto.decode = function decode(reader, length) { + EnumDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -26361,12 +26541,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumReservedRange.decode = function decode(reader, length) { + EnumReservedRange.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumDescriptorProto.EnumReservedRange(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.start = reader.int32(); @@ -26602,12 +26784,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumValueDescriptorProto.decode = function decode(reader, length) { + EnumValueDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -26859,12 +27043,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceDescriptorProto.decode = function decode(reader, length) { + ServiceDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -27167,12 +27353,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MethodDescriptorProto.decode = function decode(reader, length) { + MethodDescriptorProto.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodDescriptorProto(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.name = reader.string(); @@ -27671,12 +27859,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FileOptions.decode = function decode(reader, length) { + FileOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FileOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.javaPackage = reader.string(); @@ -28291,12 +28481,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MessageOptions.decode = function decode(reader, length) { + MessageOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.MessageOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.messageSetWireFormat = reader.bool(); @@ -28782,12 +28974,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldOptions.decode = function decode(reader, length) { + FieldOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.ctype = reader.int32(); @@ -29525,12 +29719,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EditionDefault.decode = function decode(reader, length) { + EditionDefault.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.EditionDefault(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 3: { message.edition = reader.int32(); @@ -29821,12 +30017,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - OneofOptions.decode = function decode(reader, length) { + OneofOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); @@ -30107,12 +30305,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumOptions.decode = function decode(reader, length) { + EnumOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { message.allowAlias = reader.bool(); @@ -30419,12 +30619,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - EnumValueOptions.decode = function decode(reader, length) { + EnumValueOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.EnumValueOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.deprecated = reader.bool(); @@ -30741,12 +30943,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ServiceOptions.decode = function decode(reader, length) { + ServiceOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.ServiceOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 34: { message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); @@ -31089,12 +31293,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MethodOptions.decode = function decode(reader, length) { + MethodOptions.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.MethodOptions(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 33: { message.deprecated = reader.bool(); @@ -31506,12 +31712,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UninterpretedOption.decode = function decode(reader, length) { + UninterpretedOption.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { if (!(message.name && message.name.length)) @@ -31845,12 +32053,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - NamePart.decode = function decode(reader, length) { + NamePart.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UninterpretedOption.NamePart(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.namePart = reader.string(); @@ -32121,12 +32331,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FeatureSet.decode = function decode(reader, length) { + FeatureSet.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.fieldPresence = reader.int32(); @@ -32656,12 +32868,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FeatureSetDefaults.decode = function decode(reader, length) { + FeatureSetDefaults.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSetDefaults(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.defaults && message.defaults.length)) @@ -33040,12 +33254,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FeatureSetEditionDefault.decode = function decode(reader, length) { + FeatureSetEditionDefault.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 3: { message.edition = reader.int32(); @@ -33330,12 +33546,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - SourceCodeInfo.decode = function decode(reader, length) { + SourceCodeInfo.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.location && message.location.length)) @@ -33605,12 +33823,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Location.decode = function decode(reader, length) { + Location.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.SourceCodeInfo.Location(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.path && message.path.length)) @@ -33916,12 +34136,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - GeneratedCodeInfo.decode = function decode(reader, length) { + GeneratedCodeInfo.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.annotation && message.annotation.length)) @@ -34184,12 +34406,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Annotation.decode = function decode(reader, length) { + Annotation.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.GeneratedCodeInfo.Annotation(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.path && message.path.length)) @@ -34510,12 +34734,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Duration.decode = function decode(reader, length) { + Duration.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Duration(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.seconds = reader.int64(); @@ -34751,12 +34977,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Timestamp.decode = function decode(reader, length) { + Timestamp.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.seconds = reader.int64(); @@ -34981,12 +35209,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DoubleValue.decode = function decode(reader, length) { + DoubleValue.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.DoubleValue(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.double(); @@ -35184,12 +35414,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FloatValue.decode = function decode(reader, length) { + FloatValue.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FloatValue(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.float(); @@ -35387,12 +35619,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Int64Value.decode = function decode(reader, length) { + Int64Value.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.Int64Value(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.int64(); @@ -35604,12 +35838,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UInt64Value.decode = function decode(reader, length) { + UInt64Value.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.UInt64Value(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.uint64(); @@ -35821,12 +36057,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Int32Value.decode = function decode(reader, length) { + Int32Value.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Int32Value(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.int32(); @@ -36024,12 +36262,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - UInt32Value.decode = function decode(reader, length) { + UInt32Value.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.UInt32Value(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.uint32(); @@ -36227,12 +36467,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BoolValue.decode = function decode(reader, length) { + BoolValue.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.BoolValue(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.bool(); @@ -36430,12 +36672,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - StringValue.decode = function decode(reader, length) { + StringValue.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.StringValue(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.string(); @@ -36633,12 +36877,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - BytesValue.decode = function decode(reader, length) { + BytesValue.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.BytesValue(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.value = reader.bytes(); @@ -36856,12 +37102,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Any.decode = function decode(reader, length) { + Any.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Any(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.type_url = reader.string(); @@ -37070,12 +37318,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Empty.decode = function decode(reader, length) { + Empty.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Empty(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { default: reader.skipType(tag & 7); @@ -37258,12 +37508,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - FieldMask.decode = function decode(reader, length) { + FieldMask.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldMask(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.paths && message.paths.length)) @@ -37500,12 +37752,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Http.decode = function decode(reader, length) { + Http.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.Http(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { if (!(message.rules && message.rules.length)) @@ -37850,12 +38104,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - HttpRule.decode = function decode(reader, length) { + HttpRule.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.HttpRule(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.selector = reader.string(); @@ -38234,12 +38490,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CustomHttpPattern.decode = function decode(reader, length) { + CustomHttpPattern.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.CustomHttpPattern(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.kind = reader.string(); @@ -38466,12 +38724,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CommonLanguageSettings.decode = function decode(reader, length) { + CommonLanguageSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CommonLanguageSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.referenceDocsUri = reader.string(); @@ -38835,12 +39095,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ClientLibrarySettings.decode = function decode(reader, length) { + ClientLibrarySettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ClientLibrarySettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.version = reader.string(); @@ -39364,12 +39626,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Publishing.decode = function decode(reader, length) { + Publishing.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.Publishing(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 2: { if (!(message.methodSettings && message.methodSettings.length)) @@ -39815,12 +40079,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - JavaSettings.decode = function decode(reader, length) { + JavaSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.JavaSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.libraryPackage = reader.string(); @@ -40082,12 +40348,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - CppSettings.decode = function decode(reader, length) { + CppSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.CppSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -40290,12 +40558,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - PhpSettings.decode = function decode(reader, length) { + PhpSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.PhpSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -40498,12 +40768,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - PythonSettings.decode = function decode(reader, length) { + PythonSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -40706,12 +40978,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - NodeSettings.decode = function decode(reader, length) { + NodeSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.NodeSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -40979,12 +41253,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - DotnetSettings.decode = function decode(reader, length) { + DotnetSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.DotnetSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -41358,12 +41634,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - RubySettings.decode = function decode(reader, length) { + RubySettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.RubySettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -41566,12 +41844,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - GoSettings.decode = function decode(reader, length) { + GoSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); @@ -41798,12 +42078,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - MethodSettings.decode = function decode(reader, length) { + MethodSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.MethodSettings(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.selector = reader.string(); @@ -42076,12 +42358,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - LongRunning.decode = function decode(reader, length) { + LongRunning.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.MethodSettings.LongRunning(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.initialPollDelay = $root.google.protobuf.Duration.decode(reader, reader.uint32()); @@ -42503,12 +42787,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceDescriptor.decode = function decode(reader, length) { + ResourceDescriptor.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ResourceDescriptor(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.type = reader.string(); @@ -42898,12 +43184,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - ResourceReference.decode = function decode(reader, length) { + ResourceReference.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.ResourceReference(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.type = reader.string(); @@ -43150,12 +43438,14 @@ * @throws {Error} If the payload is not a reader or valid buffer * @throws {$protobuf.util.ProtocolError} If required fields are missing */ - Status.decode = function decode(reader, length) { + Status.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.rpc.Status(); while (reader.pos < end) { var tag = reader.uint32(); + if (tag === error) + break; switch (tag >>> 3) { case 1: { message.code = reader.int32(); diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 10d106ce114..18f0616d7a3 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -1,7 +1,4 @@ { - "options": { - "syntax": "proto3" - }, "nested": { "google": { "nested": { @@ -2183,6 +2180,7 @@ }, "nested": { "FileDescriptorSet": { + "edition": "proto2", "fields": { "file": { "rule": "repeated", @@ -2192,6 +2190,7 @@ } }, "Edition": { + "edition": "proto2", "values": { "EDITION_UNKNOWN": 0, "EDITION_PROTO2": 998, @@ -2207,6 +2206,7 @@ } }, "FileDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2224,18 +2224,12 @@ "publicDependency": { "rule": "repeated", "type": "int32", - "id": 10, - "options": { - "packed": false - } + "id": 10 }, "weakDependency": { "rule": "repeated", "type": "int32", - "id": 11, - "options": { - "packed": false - } + "id": 11 }, "messageType": { "rule": "repeated", @@ -2276,6 +2270,7 @@ } }, "DescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2358,6 +2353,7 @@ } }, "ExtensionRangeOptions": { + "edition": "proto2", "fields": { "uninterpretedOption": { "rule": "repeated", @@ -2431,6 +2427,7 @@ } }, "FieldDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2510,6 +2507,7 @@ } }, "OneofDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2522,6 +2520,7 @@ } }, "EnumDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2563,6 +2562,7 @@ } }, "EnumValueDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2579,6 +2579,7 @@ } }, 
"ServiceDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2596,6 +2597,7 @@ } }, "MethodDescriptorProto": { + "edition": "proto2", "fields": { "name": { "type": "string", @@ -2630,6 +2632,7 @@ } }, "FileOptions": { + "edition": "proto2", "fields": { "javaPackage": { "type": "string", @@ -2771,6 +2774,7 @@ } }, "MessageOptions": { + "edition": "proto2", "fields": { "messageSetWireFormat": { "type": "bool", @@ -2844,6 +2848,7 @@ ] }, "FieldOptions": { + "edition": "proto2", "fields": { "ctype": { "type": "CType", @@ -2905,10 +2910,7 @@ "targets": { "rule": "repeated", "type": "OptionTargetType", - "id": 19, - "options": { - "packed": false - } + "id": 19 }, "editionDefaults": { "rule": "repeated", @@ -2992,6 +2994,7 @@ } }, "OneofOptions": { + "edition": "proto2", "fields": { "features": { "type": "FeatureSet", @@ -3011,6 +3014,7 @@ ] }, "EnumOptions": { + "edition": "proto2", "fields": { "allowAlias": { "type": "bool", @@ -3054,6 +3058,7 @@ ] }, "EnumValueOptions": { + "edition": "proto2", "fields": { "deprecated": { "type": "bool", @@ -3087,6 +3092,7 @@ ] }, "ServiceOptions": { + "edition": "proto2", "fields": { "features": { "type": "FeatureSet", @@ -3113,6 +3119,7 @@ ] }, "MethodOptions": { + "edition": "proto2", "fields": { "deprecated": { "type": "bool", @@ -3155,6 +3162,7 @@ } }, "UninterpretedOption": { + "edition": "proto2", "fields": { "name": { "rule": "repeated", @@ -3204,6 +3212,7 @@ } }, "FeatureSet": { + "edition": "proto2", "fields": { "fieldPresence": { "type": "FieldPresence", @@ -3345,6 +3354,7 @@ } }, "FeatureSetDefaults": { + "edition": "proto2", "fields": { "defaults": { "rule": "repeated", @@ -3376,6 +3386,7 @@ } }, "SourceCodeInfo": { + "edition": "proto2", "fields": { "location": { "rule": "repeated", @@ -3389,12 +3400,18 @@ "path": { "rule": "repeated", "type": "int32", - "id": 1 + "id": 1, + "options": { + "packed": true + } }, "span": { "rule": "repeated", "type": "int32", - "id": 2 + "id": 2, + 
"options": { + "packed": true + } }, "leadingComments": { "type": "string", @@ -3414,6 +3431,7 @@ } }, "GeneratedCodeInfo": { + "edition": "proto2", "fields": { "annotation": { "rule": "repeated", @@ -3427,7 +3445,10 @@ "path": { "rule": "repeated", "type": "int32", - "id": 1 + "id": 1, + "options": { + "packed": true + } }, "sourceFile": { "type": "string", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 68730059e5b..9f15ae55678 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.11.0", + "version": "5.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index 4aefa926648..e13091a5cea 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.11.0", + "version": "5.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index 1136e6c3321..e2f4a8a9991 100644 --- 
a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.11.0", + "version": "5.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 93417409058..4dd83b7176f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.11.0", + "version": "5.0.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 93417409058..4dd83b7176f 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "4.11.0", + "version": "5.0.0", "language": "TYPESCRIPT", "apis": [ { From 686e27fe797d03fddbad9c746de8d0ae892fb72b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 16:03:46 -0400 Subject: [PATCH 304/333] feat: Add BigQuery Metastore Partition Service API version 
v1beta (#549) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add BigQuery Metastore Partition Service API version v1beta PiperOrigin-RevId: 753333720 Source-Link: https://github.com/googleapis/googleapis/commit/389abd1ee15f6d6f79dcd76120b1e29d74edb23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/a87588791ac0057b5bcd5b91e497a5daff66ebbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTg3NTg4NzkxYWMwMDU3YjViY2Q1YjkxZTQ5N2E1ZGFmZjY2ZWJiYyJ9 chore: upgrade typescript generator to 4.9.0 chore: updated gapic-generator-typescript WORKSPACE section to match the latest from the generator chore: added Required Typecheck Performance Selection section to .bazelrc feat: add protobufjs 2023 edition support feat: selective gapic support for typescript generation fix: catch dangling promises PiperOrigin-RevId: 751558264 Source-Link: https://github.com/googleapis/googleapis/commit/f1321311609eb26f43fe38ce442fec34f165d0ab Source-Link: https://github.com/googleapis/googleapis-gen/commit/7aaa93d9d4bbd0c87adc18b4dc0384b783800a3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2FhYTkzZDlkNGJiZDBjODdhZGMxOGI0ZGMwMzg0Yjc4MzgwMGEzYiJ9 feat: increased the number of partitions can be written in a single request docs: updated the number of partitions (from 100 to 900) can be inserted, updated and deleted in a single request BREAKING CHANGE: remove `location` from http annotations in all of the service requests PiperOrigin-RevId: 747473743 Source-Link: https://github.com/googleapis/googleapis/commit/1c153adc542b4c915eeab5290bc42581c821cc93 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b5aa0e891f457b1c5ce75b120d1b65c8738776b5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjVhYTBlODkxZjQ1N2IxYzVjZTc1YjEyMGQxYjY1Yzg3Mzg3NzZiNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * export v1beta * undo owlbot 
tsconfig changes --------- Co-authored-by: Owl Bot Co-authored-by: Leah Cole --- .../storage/v1alpha/metastore_partition.proto | 43 +- .../storage/v1beta/metastore_partition.proto | 313 + .../bigquery/storage/v1beta/partition.proto | 140 + .../bigquery-storage/protos/protos.d.ts | 2256 +++++++ handwritten/bigquery-storage/protos/protos.js | 5428 ++++++++++++++++- .../bigquery-storage/protos/protos.json | 548 +- ...rvice.batch_create_metastore_partitions.js | 10 +- ...rvice.batch_delete_metastore_partitions.js | 10 +- ...rvice.batch_update_metastore_partitions.js | 8 + ...ition_service.list_metastore_partitions.js | 8 + ...google.cloud.bigquery.storage.v1alpha.json | 24 +- ...rvice.batch_create_metastore_partitions.js | 84 + ...rvice.batch_delete_metastore_partitions.js | 77 + ...rvice.batch_update_metastore_partitions.js | 76 + ...ition_service.list_metastore_partitions.js | 84 + ...ion_service.stream_metastore_partitions.js | 82 + ..._google.cloud.bigquery.storage.v1beta.json | 259 + handwritten/bigquery-storage/src/index.ts | 2 + .../metastore_partition_service_client.ts | 32 +- .../src/v1beta/gapic_metadata.json | 68 + .../bigquery-storage/src/v1beta/index.ts | 19 + .../metastore_partition_service_client.ts | 1161 ++++ ...store_partition_service_client_config.json | 54 + ...etastore_partition_service_proto_list.json | 4 + .../test/gapic_big_query_read_v1.ts | 72 +- ...pic_metastore_partition_service_v1alpha.ts | 84 +- ...apic_metastore_partition_service_v1beta.ts | 1151 ++++ 27 files changed, 11955 insertions(+), 142 deletions(-) create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/metastore_partition.proto create mode 100644 handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/partition.proto create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js create mode 100644 
handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js create mode 100644 handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json create mode 100644 handwritten/bigquery-storage/src/v1beta/gapic_metadata.json create mode 100644 handwritten/bigquery-storage/src/v1beta/index.ts create mode 100644 handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts create mode 100644 handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client_config.json create mode 100644 handwritten/bigquery-storage/src/v1beta/metastore_partition_service_proto_list.json create mode 100644 handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto index 0c57403e28c..68ba61b6b1d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1alpha/metastore_partition.proto @@ -36,8 +36,8 @@ option (google.api.resource_definition) = { }; // BigQuery Metastore Partition Service API. -// This service is used for managing metastore partitions in BigQuery metastore. -// The service supports only batch operations for write. 
+// This service is used for managing metastore partitions in BigQuery +// metastore. The service supports only batch operations for write. service MetastorePartitionService { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = @@ -48,7 +48,7 @@ service MetastorePartitionService { rpc BatchCreateMetastorePartitions(BatchCreateMetastorePartitionsRequest) returns (BatchCreateMetastorePartitionsResponse) { option (google.api.http) = { - post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate" + post: "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate" body: "*" }; } @@ -57,7 +57,7 @@ service MetastorePartitionService { rpc BatchDeleteMetastorePartitions(BatchDeleteMetastorePartitionsRequest) returns (google.protobuf.Empty) { option (google.api.http) = { - post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete" + post: "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete" body: "*" }; } @@ -66,7 +66,7 @@ service MetastorePartitionService { rpc BatchUpdateMetastorePartitions(BatchUpdateMetastorePartitionsRequest) returns (BatchUpdateMetastorePartitionsResponse) { option (google.api.http) = { - post: "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate" + post: "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate" body: "*" }; } @@ -125,8 +125,15 @@ message BatchCreateMetastorePartitionsRequest { // add_partitions(..). If the flag is set to false, the server will return // ALREADY_EXISTS if any partition already exists. If the flag is set to true, // the server will skip existing partitions and insert only the non-existing - // partitions. + // partitions. A maximum of 900 partitions can be inserted in a batch. bool skip_existing_partitions = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Optional trace id to be used for debugging. 
It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. Limited to 256 characters. This is expected, + // but not required, to be globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // Response message for BatchCreateMetastorePartitions. @@ -148,9 +155,16 @@ message BatchDeleteMetastorePartitionsRequest { ]; // Required. The list of metastore partitions (identified by its values) to be - // deleted. A maximum of 100 partitions can be deleted in a batch. + // deleted. A maximum of 900 partitions can be deleted in a batch. repeated MetastorePartitionValues partition_values = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. This is expected, but not required, to be + // globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // Request message for UpdateMetastorePartition. @@ -177,11 +191,19 @@ message BatchUpdateMetastorePartitionsRequest { // Required. Requests to update metastore partitions in the table. repeated UpdateMetastorePartitionRequest requests = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. This is expected, but not required, to be + // globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // Response message for BatchUpdateMetastorePartitions. 
message BatchUpdateMetastorePartitionsResponse { // The list of metastore partitions that have been updated. + // A maximum of 900 partitions can be updated in a batch. repeated MetastorePartition partitions = 1; } @@ -206,6 +228,13 @@ message ListMetastorePartitionsRequest { // "numeric_field BETWEEN 1.0 AND 5.0" // Restricted to a maximum length for 1 MB. string filter = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. Limited to 256 characters. This is expected, + // but not required, to be globally unique. + string trace_id = 3 [(google.api.field_behavior) = OPTIONAL]; } // Response message for ListMetastorePartitions. diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/metastore_partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/metastore_partition.proto new file mode 100644 index 00000000000..75cd43fb7c5 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/metastore_partition.proto @@ -0,0 +1,313 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/storage/v1beta/partition.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1Beta"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta/storagepb;storagepb"; +option java_multiple_files = true; +option java_outer_classname = "MetastorePartitionServiceProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1beta"; +option (google.api.resource_definition) = { + type: "bigquery.googleapis.com/Table" + pattern: "projects/{project}/datasets/{dataset}/tables/{table}" +}; + +// BigQuery Metastore Partition Service API. +// This service is used for managing metastore partitions in BigQuery +// metastore. The service supports only batch operations for write. +service MetastorePartitionService { + option (google.api.default_host) = "bigquerystorage.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/cloud-platform"; + + // Adds metastore partitions to a table. + rpc BatchCreateMetastorePartitions(BatchCreateMetastorePartitionsRequest) + returns (BatchCreateMetastorePartitionsResponse) { + option (google.api.http) = { + post: "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate" + body: "*" + }; + } + + // Deletes metastore partitions from a table. 
+ rpc BatchDeleteMetastorePartitions(BatchDeleteMetastorePartitionsRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete" + body: "*" + }; + } + + // Updates metastore partitions in a table. + rpc BatchUpdateMetastorePartitions(BatchUpdateMetastorePartitionsRequest) + returns (BatchUpdateMetastorePartitionsResponse) { + option (google.api.http) = { + post: "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate" + body: "*" + }; + } + + // Gets metastore partitions from a table. + rpc ListMetastorePartitions(ListMetastorePartitionsRequest) + returns (ListMetastorePartitionsResponse) { + option (google.api.http) = { + get: "/v1beta/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list" + }; + option (google.api.method_signature) = "parent"; + } + + // This is a bi-di streaming rpc method that allows the client to send + // a stream of partitions and commit all of them atomically at the end. + // If the commit is successful, the server will return a + // response and close the stream. If the commit fails (due to duplicate + // partitions or other reason), the server will close the stream with an + // error. This method is only available via the gRPC API (not REST). + rpc StreamMetastorePartitions(stream StreamMetastorePartitionsRequest) + returns (stream StreamMetastorePartitionsResponse) {} +} + +// Request message for CreateMetastorePartition. The MetastorePartition is +// uniquely identified by values, which is an ordered list. Hence, there is no +// separate name or partition id field. +message CreateMetastorePartitionRequest { + // Required. Reference to the table to where the metastore partition to be + // added, in the format of + // projects/{project}/databases/{databases}/tables/{table}. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. The metastore partition to be added. + MetastorePartition metastore_partition = 2 + [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for BatchCreateMetastorePartitions. +message BatchCreateMetastorePartitionsRequest { + // Required. Reference to the table to where the metastore partitions to be + // added, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. Requests to add metastore partitions to the table. + repeated CreateMetastorePartitionRequest requests = 2 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. Mimics the ifNotExists flag in IMetaStoreClient + // add_partitions(..). If the flag is set to false, the server will return + // ALREADY_EXISTS if any partition already exists. If the flag is set to true, + // the server will skip existing partitions and insert only the non-existing + // partitions. A maximum of 900 partitions can be inserted in a batch. + bool skip_existing_partitions = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. Limited to 256 characters. This is expected, + // but not required, to be globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for BatchCreateMetastorePartitions. +message BatchCreateMetastorePartitionsResponse { + // The list of metastore partitions that have been created. 
+ repeated MetastorePartition partitions = 1; +} + +// Request message for BatchDeleteMetastorePartitions. The MetastorePartition is +// uniquely identified by values, which is an ordered list. Hence, there is no +// separate name or partition id field. +message BatchDeleteMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. The list of metastore partitions (identified by its values) to be + // deleted. A maximum of 900 partitions can be deleted in a batch. + repeated MetastorePartitionValues partition_values = 2 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. This is expected, but not required, to be + // globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Request message for UpdateMetastorePartition. +message UpdateMetastorePartitionRequest { + // Required. The metastore partition to be updated. + MetastorePartition metastore_partition = 1 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. The list of fields to update. + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = OPTIONAL]; +} + +// Request message for BatchUpdateMetastorePartitions. +message BatchUpdateMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Required. Requests to update metastore partitions in the table. + repeated UpdateMetastorePartitionRequest requests = 2 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. Optional trace id to be used for debugging. It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. This is expected, but not required, to be + // globally unique. + string trace_id = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for BatchUpdateMetastorePartitions. +message BatchUpdateMetastorePartitionsResponse { + // The list of metastore partitions that have been updated. + // A maximum of 900 partitions can be updated in a batch. + repeated MetastorePartition partitions = 1; +} + +// Request message for ListMetastorePartitions. +message ListMetastorePartitionsRequest { + // Required. Reference to the table to which these metastore partitions + // belong, in the format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Optional. SQL text filtering statement, similar to a WHERE clause in a + // query. Only supports single-row expressions. Aggregate functions are not + // supported. + // + // Examples: + // * "int_field > 5" + // * "date_field = CAST('2014-9-27' as DATE)" + // * "nullable_field is not NULL" + // * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // * "numeric_field BETWEEN 1.0 AND 5.0" + // + // Restricted to a maximum length of 1 MB. + string filter = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Optional trace id to be used for debugging. 
It is expected that + // the client sets the same `trace_id` for all the batches in the same + // operation, so that it is possible to tie together the logs to all the + // batches in the same operation. Limited to 256 characters. This is expected, + // but not required, to be globally unique. + string trace_id = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for ListMetastorePartitions. +message ListMetastorePartitionsResponse { + // The response depends on the number of metastore partitions to be returned; + // it can be a list of partitions or a list of + // [ReadStream]((https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#readstream)) + // objects. For the second situation, the BigQuery [Read API + // ReadRows](https://cloud.google.com/bigquery/docs/reference/storage#read_from_a_session_stream) + // method must be used to stream the data and convert it into a list of + // partitions. + oneof response { + // The list of partitions. + MetastorePartitionList partitions = 1; + + // The list of streams. + StreamList streams = 2; + } +} + +// The top-level message sent by the client to the +// [Partitions.StreamMetastorePartitions][] method. +// Follows the default gRPC streaming maximum size of 4 MB. +message StreamMetastorePartitionsRequest { + // Required. Reference to the table to where the partition to be added, in the + // format of + // projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" } + ]; + + // Optional. A list of metastore partitions to be added to the table. + repeated MetastorePartition metastore_partitions = 2 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Mimics the ifNotExists flag in IMetaStoreClient + // add_partitions(..). 
If the flag is set to false, the server will return + // ALREADY_EXISTS on commit if any partition already exists. If the flag is + // set to true: + // 1) the server will skip existing partitions + // insert only the non-existing partitions as part of the commit. + // 2) The client must set the `skip_existing_partitions` field to true for + // all requests in the stream. + bool skip_existing_partitions = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// This is the response message sent by the server +// to the client for the [Partitions.StreamMetastorePartitions][] method when +// the commit is successful. Server will close the stream after sending this +// message. +message StreamMetastorePartitionsResponse { + // Total count of partitions streamed by the client during the lifetime of the + // stream. This is only set in the final response message before closing the + // stream. + int64 total_partitions_streamed_count = 2; + + // Total count of partitions inserted by the server during the lifetime of the + // stream. This is only set in the final response message before closing the + // stream. + int64 total_partitions_inserted_count = 3; +} + +// Structured custom error message for batch size too large error. +// The error can be attached as error details in the returned rpc Status for +// more structured error handling in the client. +message BatchSizeTooLargeError { + // The maximum number of items that are supported in a single batch. This is + // returned as a hint to the client to adjust the batch size. + int64 max_batch_size = 1; + + // Optional. The error message that is returned to the client. 
+ string error_message = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/partition.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/partition.proto new file mode 100644 index 00000000000..a4bf0754739 --- /dev/null +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1beta/partition.proto @@ -0,0 +1,140 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.storage.v1beta; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1Beta"; +option go_package = "cloud.google.com/go/bigquery/storage/apiv1beta/storagepb;storagepb"; +option java_multiple_files = true; +option java_outer_classname = "MetastorePartitionProto"; +option java_package = "com.google.cloud.bigquery.storage.v1beta"; +option php_namespace = "Google\\Cloud\\BigQuery\\Storage\\V1beta"; + +// Schema description of a metastore partition column. +message FieldSchema { + // Required. The name of the column. + // The maximum length of the name is 1024 characters + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The type of the metastore partition column. Maximum allowed + // length is 1024 characters. 
+ string type = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Contains information about the physical storage of the data in the metastore +// partition. +message StorageDescriptor { + // Optional. The physical location of the metastore partition + // (e.g. `gs://spark-dataproc-data/pangea-data/case_sensitive/` or + // `gs://spark-dataproc-data/pangea-data/*`). + string location_uri = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the fully qualified class name of the InputFormat + // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). + // The maximum length is 128 characters. + string input_format = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the fully qualified class name of the OutputFormat + // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). + // The maximum length is 128 characters. + string output_format = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Serializer and deserializer information. + SerDeInfo serde_info = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Serializer and deserializer information. +message SerDeInfo { + // Optional. Name of the SerDe. + // The maximum length is 256 characters. + string name = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Specifies a fully-qualified class name of the serialization + // library that is responsible for the translation of data between table + // representation and the underlying low-level input and output format + // structures. The maximum length is 256 characters. + string serialization_library = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Key-value pairs that define the initialization parameters for the + // serialization library. + // Maximum size 10 Kib. + map parameters = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Information about a Hive partition. +message MetastorePartition { + // Required. 
Represents the values of the partition keys, where each value + // corresponds to a specific partition key in the order in which the keys are + // defined. Each value is limited to 1024 characters. + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. The creation time of the partition. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Contains information about the physical storage of the data in + // the partition. + StorageDescriptor storage_descriptor = 3 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Additional parameters or metadata associated with the partition. + // Maximum size 10 KiB. + map parameters = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. List of columns. + repeated FieldSchema fields = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// List of metastore partitions. +message MetastorePartitionList { + // Required. List of partitions. + repeated MetastorePartition partitions = 1 + [(google.api.field_behavior) = REQUIRED]; +} + +// Information about a single stream that is used to read partitions. +message ReadStream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadStream" + pattern: "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}" + plural: "readStreams" + singular: "readStream" + }; + + // Output only. Identifier. Name of the stream, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`. + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IDENTIFIER + ]; +} + +// List of streams. +message StreamList { + // Output only. List of streams. + repeated ReadStream streams = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Represents the values of a metastore partition. +message MetastorePartitionValues { + // Required. 
The values of the partition keys, where each value corresponds to + // a specific partition key in the order in which the keys are defined. + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 5e917a2b256..0e82e207b13 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -4860,6 +4860,9 @@ export namespace google { /** BatchCreateMetastorePartitionsRequest skipExistingPartitions */ skipExistingPartitions?: (boolean|null); + + /** BatchCreateMetastorePartitionsRequest traceId */ + traceId?: (string|null); } /** Represents a BatchCreateMetastorePartitionsRequest. */ @@ -4880,6 +4883,9 @@ export namespace google { /** BatchCreateMetastorePartitionsRequest skipExistingPartitions. */ public skipExistingPartitions: boolean; + /** BatchCreateMetastorePartitionsRequest traceId. */ + public traceId: string; + /** * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. * @param [properties] Properties to set @@ -5063,6 +5069,9 @@ export namespace google { /** BatchDeleteMetastorePartitionsRequest partitionValues */ partitionValues?: (google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues[]|null); + + /** BatchDeleteMetastorePartitionsRequest traceId */ + traceId?: (string|null); } /** Represents a BatchDeleteMetastorePartitionsRequest. */ @@ -5080,6 +5089,9 @@ export namespace google { /** BatchDeleteMetastorePartitionsRequest partitionValues. */ public partitionValues: google.cloud.bigquery.storage.v1alpha.IMetastorePartitionValues[]; + /** BatchDeleteMetastorePartitionsRequest traceId. */ + public traceId: string; + /** * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -5269,6 +5281,9 @@ export namespace google { /** BatchUpdateMetastorePartitionsRequest requests */ requests?: (google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest[]|null); + + /** BatchUpdateMetastorePartitionsRequest traceId */ + traceId?: (string|null); } /** Represents a BatchUpdateMetastorePartitionsRequest. */ @@ -5286,6 +5301,9 @@ export namespace google { /** BatchUpdateMetastorePartitionsRequest requests. */ public requests: google.cloud.bigquery.storage.v1alpha.IUpdateMetastorePartitionRequest[]; + /** BatchUpdateMetastorePartitionsRequest traceId. */ + public traceId: string; + /** * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. * @param [properties] Properties to set @@ -5469,6 +5487,9 @@ export namespace google { /** ListMetastorePartitionsRequest filter */ filter?: (string|null); + + /** ListMetastorePartitionsRequest traceId */ + traceId?: (string|null); } /** Represents a ListMetastorePartitionsRequest. */ @@ -5486,6 +5507,9 @@ export namespace google { /** ListMetastorePartitionsRequest filter. */ public filter: string; + /** ListMetastorePartitionsRequest traceId. */ + public traceId: string; + /** * Creates a new ListMetastorePartitionsRequest instance using the specified properties. * @param [properties] Properties to set @@ -6822,6 +6846,2238 @@ export namespace google { } } + /** Namespace v1beta. */ + namespace v1beta { + + /** Represents a MetastorePartitionService */ + class MetastorePartitionService extends $protobuf.rpc.Service { + + /** + * Constructs a new MetastorePartitionService service. 
+ * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + */ + constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); + + /** + * Creates new MetastorePartitionService service using the specified rpc implementation. + * @param rpcImpl RPC implementation + * @param [requestDelimited=false] Whether requests are length-delimited + * @param [responseDelimited=false] Whether responses are length-delimited + * @returns RPC service. Useful where requests and/or responses are streamed. + */ + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): MetastorePartitionService; + + /** + * Calls BatchCreateMetastorePartitions. + * @param request BatchCreateMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchCreateMetastorePartitionsResponse + */ + public batchCreateMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitionsCallback): void; + + /** + * Calls BatchCreateMetastorePartitions. + * @param request BatchCreateMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchCreateMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest): Promise; + + /** + * Calls BatchDeleteMetastorePartitions. 
+ * @param request BatchDeleteMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and Empty + */ + public batchDeleteMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitionsCallback): void; + + /** + * Calls BatchDeleteMetastorePartitions. + * @param request BatchDeleteMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchDeleteMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest): Promise; + + /** + * Calls BatchUpdateMetastorePartitions. + * @param request BatchUpdateMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and BatchUpdateMetastorePartitionsResponse + */ + public batchUpdateMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitionsCallback): void; + + /** + * Calls BatchUpdateMetastorePartitions. + * @param request BatchUpdateMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public batchUpdateMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest): Promise; + + /** + * Calls ListMetastorePartitions. + * @param request ListMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and ListMetastorePartitionsResponse + */ + public listMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitionsCallback): void; + + /** + * Calls ListMetastorePartitions. 
+ * @param request ListMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public listMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest): Promise; + + /** + * Calls StreamMetastorePartitions. + * @param request StreamMetastorePartitionsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and StreamMetastorePartitionsResponse + */ + public streamMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest, callback: google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitionsCallback): void; + + /** + * Calls StreamMetastorePartitions. + * @param request StreamMetastorePartitionsRequest message or plain object + * @returns Promise + */ + public streamMetastorePartitions(request: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest): Promise; + } + + namespace MetastorePartitionService { + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchCreateMetastorePartitions}. + * @param error Error, if any + * @param [response] BatchCreateMetastorePartitionsResponse + */ + type BatchCreateMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchDeleteMetastorePartitions}. + * @param error Error, if any + * @param [response] Empty + */ + type BatchDeleteMetastorePartitionsCallback = (error: (Error|null), response?: google.protobuf.Empty) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchUpdateMetastorePartitions}. 
+ * @param error Error, if any + * @param [response] BatchUpdateMetastorePartitionsResponse + */ + type BatchUpdateMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|listMetastorePartitions}. + * @param error Error, if any + * @param [response] ListMetastorePartitionsResponse + */ + type ListMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse) => void; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|streamMetastorePartitions}. + * @param error Error, if any + * @param [response] StreamMetastorePartitionsResponse + */ + type StreamMetastorePartitionsCallback = (error: (Error|null), response?: google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse) => void; + } + + /** Properties of a CreateMetastorePartitionRequest. */ + interface ICreateMetastorePartitionRequest { + + /** CreateMetastorePartitionRequest parent */ + parent?: (string|null); + + /** CreateMetastorePartitionRequest metastorePartition */ + metastorePartition?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition|null); + } + + /** Represents a CreateMetastorePartitionRequest. */ + class CreateMetastorePartitionRequest implements ICreateMetastorePartitionRequest { + + /** + * Constructs a new CreateMetastorePartitionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest); + + /** CreateMetastorePartitionRequest parent. */ + public parent: string; + + /** CreateMetastorePartitionRequest metastorePartition. 
*/ + public metastorePartition?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition|null); + + /** + * Creates a new CreateMetastorePartitionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns CreateMetastorePartitionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest): google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest; + + /** + * Encodes the specified CreateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.verify|verify} messages. + * @param message CreateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified CreateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.verify|verify} messages. + * @param message CreateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest; + + /** + * Verifies a CreateMetastorePartitionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a CreateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns CreateMetastorePartitionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest; + + /** + * Creates a plain object from a CreateMetastorePartitionRequest message. Also converts values to other types if specified. 
+ * @param message CreateMetastorePartitionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this CreateMetastorePartitionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for CreateMetastorePartitionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateMetastorePartitionsRequest. */ + interface IBatchCreateMetastorePartitionsRequest { + + /** BatchCreateMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchCreateMetastorePartitionsRequest requests */ + requests?: (google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest[]|null); + + /** BatchCreateMetastorePartitionsRequest skipExistingPartitions */ + skipExistingPartitions?: (boolean|null); + + /** BatchCreateMetastorePartitionsRequest traceId */ + traceId?: (string|null); + } + + /** Represents a BatchCreateMetastorePartitionsRequest. */ + class BatchCreateMetastorePartitionsRequest implements IBatchCreateMetastorePartitionsRequest { + + /** + * Constructs a new BatchCreateMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest); + + /** BatchCreateMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchCreateMetastorePartitionsRequest requests. */ + public requests: google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest[]; + + /** BatchCreateMetastorePartitionsRequest skipExistingPartitions. 
*/ + public skipExistingPartitions: boolean; + + /** BatchCreateMetastorePartitionsRequest traceId. */ + public traceId: string; + + /** + * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest; + + /** + * Verifies a BatchCreateMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @param message BatchCreateMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchCreateMetastorePartitionsResponse. */ + interface IBatchCreateMetastorePartitionsResponse { + + /** BatchCreateMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition[]|null); + } + + /** Represents a BatchCreateMetastorePartitionsResponse. */ + class BatchCreateMetastorePartitionsResponse implements IBatchCreateMetastorePartitionsResponse { + + /** + * Constructs a new BatchCreateMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse); + + /** BatchCreateMetastorePartitionsResponse partitions. */ + public partitions: google.cloud.bigquery.storage.v1beta.IMetastorePartition[]; + + /** + * Creates a new BatchCreateMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchCreateMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse; + + /** + * Verifies a BatchCreateMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchCreateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchCreateMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsResponse message. Also converts values to other types if specified. + * @param message BatchCreateMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchCreateMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchDeleteMetastorePartitionsRequest. 
*/ + interface IBatchDeleteMetastorePartitionsRequest { + + /** BatchDeleteMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchDeleteMetastorePartitionsRequest partitionValues */ + partitionValues?: (google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues[]|null); + + /** BatchDeleteMetastorePartitionsRequest traceId */ + traceId?: (string|null); + } + + /** Represents a BatchDeleteMetastorePartitionsRequest. */ + class BatchDeleteMetastorePartitionsRequest implements IBatchDeleteMetastorePartitionsRequest { + + /** + * Constructs a new BatchDeleteMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest); + + /** BatchDeleteMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchDeleteMetastorePartitionsRequest partitionValues. */ + public partitionValues: google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues[]; + + /** BatchDeleteMetastorePartitionsRequest traceId. */ + public traceId: string; + + /** + * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchDeleteMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest): google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. 
+ * @param message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest; + + /** + * Verifies a BatchDeleteMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchDeleteMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchDeleteMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchDeleteMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message BatchDeleteMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchDeleteMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchDeleteMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an UpdateMetastorePartitionRequest. 
*/ + interface IUpdateMetastorePartitionRequest { + + /** UpdateMetastorePartitionRequest metastorePartition */ + metastorePartition?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition|null); + + /** UpdateMetastorePartitionRequest updateMask */ + updateMask?: (google.protobuf.IFieldMask|null); + } + + /** Represents an UpdateMetastorePartitionRequest. */ + class UpdateMetastorePartitionRequest implements IUpdateMetastorePartitionRequest { + + /** + * Constructs a new UpdateMetastorePartitionRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest); + + /** UpdateMetastorePartitionRequest metastorePartition. */ + public metastorePartition?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition|null); + + /** UpdateMetastorePartitionRequest updateMask. */ + public updateMask?: (google.protobuf.IFieldMask|null); + + /** + * Creates a new UpdateMetastorePartitionRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns UpdateMetastorePartitionRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest): google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.verify|verify} messages. + * @param message UpdateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.verify|verify} messages. + * @param message UpdateMetastorePartitionRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest; + + /** + * Verifies an UpdateMetastorePartitionRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an UpdateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns UpdateMetastorePartitionRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest; + + /** + * Creates a plain object from an UpdateMetastorePartitionRequest message. Also converts values to other types if specified. + * @param message UpdateMetastorePartitionRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UpdateMetastorePartitionRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for UpdateMetastorePartitionRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchUpdateMetastorePartitionsRequest. */ + interface IBatchUpdateMetastorePartitionsRequest { + + /** BatchUpdateMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** BatchUpdateMetastorePartitionsRequest requests */ + requests?: (google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest[]|null); + + /** BatchUpdateMetastorePartitionsRequest traceId */ + traceId?: (string|null); + } + + /** Represents a BatchUpdateMetastorePartitionsRequest. */ + class BatchUpdateMetastorePartitionsRequest implements IBatchUpdateMetastorePartitionsRequest { + + /** + * Constructs a new BatchUpdateMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest); + + /** BatchUpdateMetastorePartitionsRequest parent. */ + public parent: string; + + /** BatchUpdateMetastorePartitionsRequest requests. 
*/ + public requests: google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest[]; + + /** BatchUpdateMetastorePartitionsRequest traceId. */ + public traceId: string; + + /** + * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchUpdateMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest; + + /** + * Verifies a BatchUpdateMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchUpdateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchUpdateMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @param message BatchUpdateMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchUpdateMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchUpdateMetastorePartitionsResponse. */ + interface IBatchUpdateMetastorePartitionsResponse { + + /** BatchUpdateMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition[]|null); + } + + /** Represents a BatchUpdateMetastorePartitionsResponse. */ + class BatchUpdateMetastorePartitionsResponse implements IBatchUpdateMetastorePartitionsResponse { + + /** + * Constructs a new BatchUpdateMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse); + + /** BatchUpdateMetastorePartitionsResponse partitions. */ + public partitions: google.cloud.bigquery.storage.v1beta.IMetastorePartition[]; + + /** + * Creates a new BatchUpdateMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns BatchUpdateMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @param message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse; + + /** + * Verifies a BatchUpdateMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchUpdateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchUpdateMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsResponse message. Also converts values to other types if specified. + * @param message BatchUpdateMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchUpdateMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ListMetastorePartitionsRequest. 
*/ + interface IListMetastorePartitionsRequest { + + /** ListMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** ListMetastorePartitionsRequest filter */ + filter?: (string|null); + + /** ListMetastorePartitionsRequest traceId */ + traceId?: (string|null); + } + + /** Represents a ListMetastorePartitionsRequest. */ + class ListMetastorePartitionsRequest implements IListMetastorePartitionsRequest { + + /** + * Constructs a new ListMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest); + + /** ListMetastorePartitionsRequest parent. */ + public parent: string; + + /** ListMetastorePartitionsRequest filter. */ + public filter: string; + + /** ListMetastorePartitionsRequest traceId. */ + public traceId: string; + + /** + * Creates a new ListMetastorePartitionsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns ListMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest; + + /** + * Encodes the specified ListMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest.verify|verify} messages. + * @param message ListMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest.verify|verify} messages. 
+ * @param message ListMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest; + + /** + * Verifies a ListMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns ListMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest; + + /** + * Creates a plain object from a ListMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message ListMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ListMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ListMetastorePartitionsResponse. */ + interface IListMetastorePartitionsResponse { + + /** ListMetastorePartitionsResponse partitions */ + partitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartitionList|null); + + /** ListMetastorePartitionsResponse streams */ + streams?: (google.cloud.bigquery.storage.v1beta.IStreamList|null); + } + + /** Represents a ListMetastorePartitionsResponse. */ + class ListMetastorePartitionsResponse implements IListMetastorePartitionsResponse { + + /** + * Constructs a new ListMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse); + + /** ListMetastorePartitionsResponse partitions. */ + public partitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartitionList|null); + + /** ListMetastorePartitionsResponse streams. 
*/ + public streams?: (google.cloud.bigquery.storage.v1beta.IStreamList|null); + + /** ListMetastorePartitionsResponse response. */ + public response?: ("partitions"|"streams"); + + /** + * Creates a new ListMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns ListMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse; + + /** + * Encodes the specified ListMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.verify|verify} messages. + * @param message ListMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ListMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.verify|verify} messages. + * @param message ListMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse; + + /** + * Verifies a ListMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ListMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ListMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse; + + /** + * Creates a plain object from a ListMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @param message ListMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ListMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ListMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamMetastorePartitionsRequest. */ + interface IStreamMetastorePartitionsRequest { + + /** StreamMetastorePartitionsRequest parent */ + parent?: (string|null); + + /** StreamMetastorePartitionsRequest metastorePartitions */ + metastorePartitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition[]|null); + + /** StreamMetastorePartitionsRequest skipExistingPartitions */ + skipExistingPartitions?: (boolean|null); + } + + /** Represents a StreamMetastorePartitionsRequest. */ + class StreamMetastorePartitionsRequest implements IStreamMetastorePartitionsRequest { + + /** + * Constructs a new StreamMetastorePartitionsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest); + + /** StreamMetastorePartitionsRequest parent. */ + public parent: string; + + /** StreamMetastorePartitionsRequest metastorePartitions. */ + public metastorePartitions: google.cloud.bigquery.storage.v1beta.IMetastorePartition[]; + + /** StreamMetastorePartitionsRequest skipExistingPartitions. */ + public skipExistingPartitions: boolean; + + /** + * Creates a new StreamMetastorePartitionsRequest instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns StreamMetastorePartitionsRequest instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.verify|verify} messages. + * @param message StreamMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.verify|verify} messages. + * @param message StreamMetastorePartitionsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest; + + /** + * Verifies a StreamMetastorePartitionsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamMetastorePartitionsRequest + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest; + + /** + * Creates a plain object from a StreamMetastorePartitionsRequest message. Also converts values to other types if specified. + * @param message StreamMetastorePartitionsRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamMetastorePartitionsRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamMetastorePartitionsRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamMetastorePartitionsResponse. 
*/ + interface IStreamMetastorePartitionsResponse { + + /** StreamMetastorePartitionsResponse totalPartitionsStreamedCount */ + totalPartitionsStreamedCount?: (number|Long|string|null); + + /** StreamMetastorePartitionsResponse totalPartitionsInsertedCount */ + totalPartitionsInsertedCount?: (number|Long|string|null); + } + + /** Represents a StreamMetastorePartitionsResponse. */ + class StreamMetastorePartitionsResponse implements IStreamMetastorePartitionsResponse { + + /** + * Constructs a new StreamMetastorePartitionsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse); + + /** StreamMetastorePartitionsResponse totalPartitionsStreamedCount. */ + public totalPartitionsStreamedCount: (number|Long|string); + + /** StreamMetastorePartitionsResponse totalPartitionsInsertedCount. */ + public totalPartitionsInsertedCount: (number|Long|string); + + /** + * Creates a new StreamMetastorePartitionsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamMetastorePartitionsResponse instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse.verify|verify} messages. + * @param message StreamMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message, length delimited. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse.verify|verify} messages. + * @param message StreamMetastorePartitionsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse; + + /** + * Verifies a StreamMetastorePartitionsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns StreamMetastorePartitionsResponse + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse; + + /** + * Creates a plain object from a StreamMetastorePartitionsResponse message. Also converts values to other types if specified. + * @param message StreamMetastorePartitionsResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamMetastorePartitionsResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamMetastorePartitionsResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a BatchSizeTooLargeError. */ + interface IBatchSizeTooLargeError { + + /** BatchSizeTooLargeError maxBatchSize */ + maxBatchSize?: (number|Long|string|null); + + /** BatchSizeTooLargeError errorMessage */ + errorMessage?: (string|null); + } + + /** Represents a BatchSizeTooLargeError. */ + class BatchSizeTooLargeError implements IBatchSizeTooLargeError { + + /** + * Constructs a new BatchSizeTooLargeError. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError); + + /** BatchSizeTooLargeError maxBatchSize. */ + public maxBatchSize: (number|Long|string); + + /** BatchSizeTooLargeError errorMessage. */ + public errorMessage: string; + + /** + * Creates a new BatchSizeTooLargeError instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns BatchSizeTooLargeError instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError): google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError; + + /** + * Encodes the specified BatchSizeTooLargeError message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError.verify|verify} messages. + * @param message BatchSizeTooLargeError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified BatchSizeTooLargeError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError.verify|verify} messages. + * @param message BatchSizeTooLargeError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError; + + /** + * Verifies a BatchSizeTooLargeError message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a BatchSizeTooLargeError message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns BatchSizeTooLargeError + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError; + + /** + * Creates a plain object from a BatchSizeTooLargeError message. Also converts values to other types if specified. + * @param message BatchSizeTooLargeError + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this BatchSizeTooLargeError to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for BatchSizeTooLargeError + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a FieldSchema. */ + interface IFieldSchema { + + /** FieldSchema name */ + name?: (string|null); + + /** FieldSchema type */ + type?: (string|null); + } + + /** Represents a FieldSchema. */ + class FieldSchema implements IFieldSchema { + + /** + * Constructs a new FieldSchema. 
+ * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IFieldSchema); + + /** FieldSchema name. */ + public name: string; + + /** FieldSchema type. */ + public type: string; + + /** + * Creates a new FieldSchema instance using the specified properties. + * @param [properties] Properties to set + * @returns FieldSchema instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IFieldSchema): google.cloud.bigquery.storage.v1beta.FieldSchema; + + /** + * Encodes the specified FieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.FieldSchema.verify|verify} messages. + * @param message FieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.FieldSchema.verify|verify} messages. + * @param message FieldSchema message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IFieldSchema, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FieldSchema message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.FieldSchema; + + /** + * Decodes a FieldSchema message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.FieldSchema; + + /** + * Verifies a FieldSchema message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FieldSchema message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FieldSchema + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.FieldSchema; + + /** + * Creates a plain object from a FieldSchema message. Also converts values to other types if specified. + * @param message FieldSchema + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.FieldSchema, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FieldSchema to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FieldSchema + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StorageDescriptor. 
*/ + interface IStorageDescriptor { + + /** StorageDescriptor locationUri */ + locationUri?: (string|null); + + /** StorageDescriptor inputFormat */ + inputFormat?: (string|null); + + /** StorageDescriptor outputFormat */ + outputFormat?: (string|null); + + /** StorageDescriptor serdeInfo */ + serdeInfo?: (google.cloud.bigquery.storage.v1beta.ISerDeInfo|null); + } + + /** Represents a StorageDescriptor. */ + class StorageDescriptor implements IStorageDescriptor { + + /** + * Constructs a new StorageDescriptor. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IStorageDescriptor); + + /** StorageDescriptor locationUri. */ + public locationUri: string; + + /** StorageDescriptor inputFormat. */ + public inputFormat: string; + + /** StorageDescriptor outputFormat. */ + public outputFormat: string; + + /** StorageDescriptor serdeInfo. */ + public serdeInfo?: (google.cloud.bigquery.storage.v1beta.ISerDeInfo|null); + + /** + * Creates a new StorageDescriptor instance using the specified properties. + * @param [properties] Properties to set + * @returns StorageDescriptor instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IStorageDescriptor): google.cloud.bigquery.storage.v1beta.StorageDescriptor; + + /** + * Encodes the specified StorageDescriptor message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StorageDescriptor.verify|verify} messages. + * @param message StorageDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IStorageDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StorageDescriptor message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StorageDescriptor.verify|verify} messages. 
+ * @param message StorageDescriptor message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IStorageDescriptor, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.StorageDescriptor; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.StorageDescriptor; + + /** + * Verifies a StorageDescriptor message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StorageDescriptor message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StorageDescriptor + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.StorageDescriptor; + + /** + * Creates a plain object from a StorageDescriptor message. Also converts values to other types if specified. 
+ * @param message StorageDescriptor + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.StorageDescriptor, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StorageDescriptor to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StorageDescriptor + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a SerDeInfo. */ + interface ISerDeInfo { + + /** SerDeInfo name */ + name?: (string|null); + + /** SerDeInfo serializationLibrary */ + serializationLibrary?: (string|null); + + /** SerDeInfo parameters */ + parameters?: ({ [k: string]: string }|null); + } + + /** Represents a SerDeInfo. */ + class SerDeInfo implements ISerDeInfo { + + /** + * Constructs a new SerDeInfo. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.ISerDeInfo); + + /** SerDeInfo name. */ + public name: string; + + /** SerDeInfo serializationLibrary. */ + public serializationLibrary: string; + + /** SerDeInfo parameters. */ + public parameters: { [k: string]: string }; + + /** + * Creates a new SerDeInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns SerDeInfo instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.ISerDeInfo): google.cloud.bigquery.storage.v1beta.SerDeInfo; + + /** + * Encodes the specified SerDeInfo message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.SerDeInfo.verify|verify} messages. 
+ * @param message SerDeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.ISerDeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SerDeInfo message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.SerDeInfo.verify|verify} messages. + * @param message SerDeInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.ISerDeInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.SerDeInfo; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.SerDeInfo; + + /** + * Verifies a SerDeInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SerDeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns SerDeInfo + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.SerDeInfo; + + /** + * Creates a plain object from a SerDeInfo message. Also converts values to other types if specified. + * @param message SerDeInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.SerDeInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SerDeInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SerDeInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartition. */ + interface IMetastorePartition { + + /** MetastorePartition values */ + values?: (string[]|null); + + /** MetastorePartition createTime */ + createTime?: (google.protobuf.ITimestamp|null); + + /** MetastorePartition storageDescriptor */ + storageDescriptor?: (google.cloud.bigquery.storage.v1beta.IStorageDescriptor|null); + + /** MetastorePartition parameters */ + parameters?: ({ [k: string]: string }|null); + + /** MetastorePartition fields */ + fields?: (google.cloud.bigquery.storage.v1beta.IFieldSchema[]|null); + } + + /** Represents a MetastorePartition. */ + class MetastorePartition implements IMetastorePartition { + + /** + * Constructs a new MetastorePartition. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartition); + + /** MetastorePartition values. */ + public values: string[]; + + /** MetastorePartition createTime. */ + public createTime?: (google.protobuf.ITimestamp|null); + + /** MetastorePartition storageDescriptor. 
*/ + public storageDescriptor?: (google.cloud.bigquery.storage.v1beta.IStorageDescriptor|null); + + /** MetastorePartition parameters. */ + public parameters: { [k: string]: string }; + + /** MetastorePartition fields. */ + public fields: google.cloud.bigquery.storage.v1beta.IFieldSchema[]; + + /** + * Creates a new MetastorePartition instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartition instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartition): google.cloud.bigquery.storage.v1beta.MetastorePartition; + + /** + * Encodes the specified MetastorePartition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartition.verify|verify} messages. + * @param message MetastorePartition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IMetastorePartition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartition.verify|verify} messages. + * @param message MetastorePartition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IMetastorePartition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.MetastorePartition; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.MetastorePartition; + + /** + * Verifies a MetastorePartition message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartition message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartition + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.MetastorePartition; + + /** + * Creates a plain object from a MetastorePartition message. Also converts values to other types if specified. + * @param message MetastorePartition + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.MetastorePartition, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartition to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartition + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartitionList. */ + interface IMetastorePartitionList { + + /** MetastorePartitionList partitions */ + partitions?: (google.cloud.bigquery.storage.v1beta.IMetastorePartition[]|null); + } + + /** Represents a MetastorePartitionList. */ + class MetastorePartitionList implements IMetastorePartitionList { + + /** + * Constructs a new MetastorePartitionList. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartitionList); + + /** MetastorePartitionList partitions. */ + public partitions: google.cloud.bigquery.storage.v1beta.IMetastorePartition[]; + + /** + * Creates a new MetastorePartitionList instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartitionList instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartitionList): google.cloud.bigquery.storage.v1beta.MetastorePartitionList; + + /** + * Encodes the specified MetastorePartitionList message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionList.verify|verify} messages. + * @param message MetastorePartitionList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IMetastorePartitionList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartitionList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionList.verify|verify} messages. 
+ * @param message MetastorePartitionList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IMetastorePartitionList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.MetastorePartitionList; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.MetastorePartitionList; + + /** + * Verifies a MetastorePartitionList message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartitionList message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartitionList + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.MetastorePartitionList; + + /** + * Creates a plain object from a MetastorePartitionList message. Also converts values to other types if specified. 
+ * @param message MetastorePartitionList + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.MetastorePartitionList, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartitionList to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartitionList + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ReadStream. */ + interface IReadStream { + + /** ReadStream name */ + name?: (string|null); + } + + /** Represents a ReadStream. */ + class ReadStream implements IReadStream { + + /** + * Constructs a new ReadStream. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IReadStream); + + /** ReadStream name. */ + public name: string; + + /** + * Creates a new ReadStream instance using the specified properties. + * @param [properties] Properties to set + * @returns ReadStream instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IReadStream): google.cloud.bigquery.storage.v1beta.ReadStream; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ReadStream.verify|verify} messages. + * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ReadStream.verify|verify} messages. 
+ * @param message ReadStream message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IReadStream, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.ReadStream; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.ReadStream; + + /** + * Verifies a ReadStream message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ReadStream + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.ReadStream; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. 
+ * @param message ReadStream + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.ReadStream, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ReadStream to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ReadStream + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a StreamList. */ + interface IStreamList { + + /** StreamList streams */ + streams?: (google.cloud.bigquery.storage.v1beta.IReadStream[]|null); + } + + /** Represents a StreamList. */ + class StreamList implements IStreamList { + + /** + * Constructs a new StreamList. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IStreamList); + + /** StreamList streams. */ + public streams: google.cloud.bigquery.storage.v1beta.IReadStream[]; + + /** + * Creates a new StreamList instance using the specified properties. + * @param [properties] Properties to set + * @returns StreamList instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IStreamList): google.cloud.bigquery.storage.v1beta.StreamList; + + /** + * Encodes the specified StreamList message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamList.verify|verify} messages. + * @param message StreamList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IStreamList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified StreamList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamList.verify|verify} messages. 
+ * @param message StreamList message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IStreamList, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a StreamList message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.StreamList; + + /** + * Decodes a StreamList message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.StreamList; + + /** + * Verifies a StreamList message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a StreamList message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns StreamList + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.StreamList; + + /** + * Creates a plain object from a StreamList message. Also converts values to other types if specified. 
+ * @param message StreamList + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.StreamList, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this StreamList to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for StreamList + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MetastorePartitionValues. */ + interface IMetastorePartitionValues { + + /** MetastorePartitionValues values */ + values?: (string[]|null); + } + + /** Represents a MetastorePartitionValues. */ + class MetastorePartitionValues implements IMetastorePartitionValues { + + /** + * Constructs a new MetastorePartitionValues. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues); + + /** MetastorePartitionValues values. */ + public values: string[]; + + /** + * Creates a new MetastorePartitionValues instance using the specified properties. + * @param [properties] Properties to set + * @returns MetastorePartitionValues instance + */ + public static create(properties?: google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues): google.cloud.bigquery.storage.v1beta.MetastorePartitionValues; + + /** + * Encodes the specified MetastorePartitionValues message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.verify|verify} messages. 
+ * @param message MetastorePartitionValues message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MetastorePartitionValues message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.verify|verify} messages. + * @param message MetastorePartitionValues message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.storage.v1beta.MetastorePartitionValues; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.storage.v1beta.MetastorePartitionValues; + + /** + * Verifies a MetastorePartitionValues message. 
+ * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MetastorePartitionValues message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MetastorePartitionValues + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.storage.v1beta.MetastorePartitionValues; + + /** + * Creates a plain object from a MetastorePartitionValues message. Also converts values to other types if specified. + * @param message MetastorePartitionValues + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.storage.v1beta.MetastorePartitionValues, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MetastorePartitionValues to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MetastorePartitionValues + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Namespace v1beta1. 
*/ namespace v1beta1 { diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 5058f54c6c2..dd704459344 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -12023,6 +12023,7 @@ * @property {string|null} [parent] BatchCreateMetastorePartitionsRequest parent * @property {Array.|null} [requests] BatchCreateMetastorePartitionsRequest requests * @property {boolean|null} [skipExistingPartitions] BatchCreateMetastorePartitionsRequest skipExistingPartitions + * @property {string|null} [traceId] BatchCreateMetastorePartitionsRequest traceId */ /** @@ -12065,6 +12066,14 @@ */ BatchCreateMetastorePartitionsRequest.prototype.skipExistingPartitions = false; + /** + * BatchCreateMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.traceId = ""; + /** * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. 
* @function create @@ -12096,6 +12105,8 @@ $root.google.cloud.bigquery.storage.v1alpha.CreateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.skipExistingPartitions != null && Object.hasOwnProperty.call(message, "skipExistingPartitions")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.skipExistingPartitions); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); return writer; }; @@ -12146,6 +12157,10 @@ message.skipExistingPartitions = reader.bool(); break; } + case 4: { + message.traceId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -12196,6 +12211,9 @@ if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) if (typeof message.skipExistingPartitions !== "boolean") return "skipExistingPartitions: boolean expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; @@ -12225,6 +12243,8 @@ } if (object.skipExistingPartitions != null) message.skipExistingPartitions = Boolean(object.skipExistingPartitions); + if (object.traceId != null) + message.traceId = String(object.traceId); return message; }; @@ -12246,6 +12266,7 @@ if (options.defaults) { object.parent = ""; object.skipExistingPartitions = false; + object.traceId = ""; } if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; @@ -12256,6 +12277,8 @@ } if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) object.skipExistingPartitions = message.skipExistingPartitions; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; @@ -12522,6 +12545,7 @@ * @interface IBatchDeleteMetastorePartitionsRequest * @property 
{string|null} [parent] BatchDeleteMetastorePartitionsRequest parent * @property {Array.|null} [partitionValues] BatchDeleteMetastorePartitionsRequest partitionValues + * @property {string|null} [traceId] BatchDeleteMetastorePartitionsRequest traceId */ /** @@ -12556,6 +12580,14 @@ */ BatchDeleteMetastorePartitionsRequest.prototype.partitionValues = $util.emptyArray; + /** + * BatchDeleteMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.traceId = ""; + /** * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. * @function create @@ -12585,6 +12617,8 @@ if (message.partitionValues != null && message.partitionValues.length) for (var i = 0; i < message.partitionValues.length; ++i) $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.encode(message.partitionValues[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); return writer; }; @@ -12631,6 +12665,10 @@ message.partitionValues.push($root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.decode(reader, reader.uint32())); break; } + case 4: { + message.traceId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -12678,6 +12716,9 @@ return "partitionValues." 
+ error; } } + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; @@ -12705,6 +12746,8 @@ message.partitionValues[i] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.fromObject(object.partitionValues[i]); } } + if (object.traceId != null) + message.traceId = String(object.traceId); return message; }; @@ -12723,8 +12766,10 @@ var object = {}; if (options.arrays || options.defaults) object.partitionValues = []; - if (options.defaults) + if (options.defaults) { object.parent = ""; + object.traceId = ""; + } if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; if (message.partitionValues && message.partitionValues.length) { @@ -12732,6 +12777,8 @@ for (var j = 0; j < message.partitionValues.length; ++j) object.partitionValues[j] = $root.google.cloud.bigquery.storage.v1alpha.MetastorePartitionValues.toObject(message.partitionValues[j], options); } + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; @@ -13011,6 +13058,7 @@ * @interface IBatchUpdateMetastorePartitionsRequest * @property {string|null} [parent] BatchUpdateMetastorePartitionsRequest parent * @property {Array.|null} [requests] BatchUpdateMetastorePartitionsRequest requests + * @property {string|null} [traceId] BatchUpdateMetastorePartitionsRequest traceId */ /** @@ -13045,6 +13093,14 @@ */ BatchUpdateMetastorePartitionsRequest.prototype.requests = $util.emptyArray; + /** + * BatchUpdateMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.traceId = ""; + /** * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. 
* @function create @@ -13074,6 +13130,8 @@ if (message.requests != null && message.requests.length) for (var i = 0; i < message.requests.length; ++i) $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); return writer; }; @@ -13120,6 +13178,10 @@ message.requests.push($root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.decode(reader, reader.uint32())); break; } + case 4: { + message.traceId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -13167,6 +13229,9 @@ return "requests." + error; } } + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; @@ -13194,6 +13259,8 @@ message.requests[i] = $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.fromObject(object.requests[i]); } } + if (object.traceId != null) + message.traceId = String(object.traceId); return message; }; @@ -13212,8 +13279,10 @@ var object = {}; if (options.arrays || options.defaults) object.requests = []; - if (options.defaults) + if (options.defaults) { object.parent = ""; + object.traceId = ""; + } if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; if (message.requests && message.requests.length) { @@ -13221,6 +13290,8 @@ for (var j = 0; j < message.requests.length; ++j) object.requests[j] = $root.google.cloud.bigquery.storage.v1alpha.UpdateMetastorePartitionRequest.toObject(message.requests[j], options); } + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; @@ -13487,6 +13558,7 @@ * @interface IListMetastorePartitionsRequest * @property {string|null} 
[parent] ListMetastorePartitionsRequest parent * @property {string|null} [filter] ListMetastorePartitionsRequest filter + * @property {string|null} [traceId] ListMetastorePartitionsRequest traceId */ /** @@ -13520,6 +13592,14 @@ */ ListMetastorePartitionsRequest.prototype.filter = ""; + /** + * ListMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.traceId = ""; + /** * Creates a new ListMetastorePartitionsRequest instance using the specified properties. * @function create @@ -13548,6 +13628,8 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); if (message.filter != null && Object.hasOwnProperty.call(message, "filter")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.filter); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.traceId); return writer; }; @@ -13592,6 +13674,10 @@ message.filter = reader.string(); break; } + case 3: { + message.traceId = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -13633,6 +13719,9 @@ if (message.filter != null && message.hasOwnProperty("filter")) if (!$util.isString(message.filter)) return "filter: string expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; return null; }; @@ -13652,6 +13741,8 @@ message.parent = String(object.parent); if (object.filter != null) message.filter = String(object.filter); + if (object.traceId != null) + message.traceId = String(object.traceId); return message; }; @@ -13671,11 +13762,14 @@ if (options.defaults) { object.parent = ""; object.filter = ""; + object.traceId = ""; } if (message.parent != null && message.hasOwnProperty("parent")) object.parent = message.parent; if (message.filter != null && 
message.hasOwnProperty("filter")) object.filter = message.filter; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; return object; }; @@ -16807,6 +16901,5336 @@ return v1alpha; })(); + storage.v1beta = (function() { + + /** + * Namespace v1beta. + * @memberof google.cloud.bigquery.storage + * @namespace + */ + var v1beta = {}; + + v1beta.MetastorePartitionService = (function() { + + /** + * Constructs a new MetastorePartitionService service. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a MetastorePartitionService + * @extends $protobuf.rpc.Service + * @constructor + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + */ + function MetastorePartitionService(rpcImpl, requestDelimited, responseDelimited) { + $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); + } + + (MetastorePartitionService.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = MetastorePartitionService; + + /** + * Creates new MetastorePartitionService service using the specified rpc implementation. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @static + * @param {$protobuf.RPCImpl} rpcImpl RPC implementation + * @param {boolean} [requestDelimited=false] Whether requests are length-delimited + * @param {boolean} [responseDelimited=false] Whether responses are length-delimited + * @returns {MetastorePartitionService} RPC service. Useful where requests and/or responses are streamed. 
+ */ + MetastorePartitionService.create = function create(rpcImpl, requestDelimited, responseDelimited) { + return new this(rpcImpl, requestDelimited, responseDelimited); + }; + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchCreateMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @typedef BatchCreateMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} [response] BatchCreateMetastorePartitionsResponse + */ + + /** + * Calls BatchCreateMetastorePartitions. + * @function batchCreateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest} request BatchCreateMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and BatchCreateMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchCreateMetastorePartitions = function batchCreateMetastorePartitions(request, callback) { + return this.rpcCall(batchCreateMetastorePartitions, $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse, request, callback); + }, "name", { value: "BatchCreateMetastorePartitions" }); + + /** + * Calls BatchCreateMetastorePartitions. 
+ * @function batchCreateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest} request BatchCreateMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchDeleteMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @typedef BatchDeleteMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.protobuf.Empty} [response] Empty + */ + + /** + * Calls BatchDeleteMetastorePartitions. + * @function batchDeleteMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest} request BatchDeleteMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and Empty + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchDeleteMetastorePartitions = function batchDeleteMetastorePartitions(request, callback) { + return this.rpcCall(batchDeleteMetastorePartitions, $root.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest, $root.google.protobuf.Empty, request, callback); + }, "name", { value: "BatchDeleteMetastorePartitions" }); + + /** + * Calls BatchDeleteMetastorePartitions. 
+ * @function batchDeleteMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest} request BatchDeleteMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|batchUpdateMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @typedef BatchUpdateMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} [response] BatchUpdateMetastorePartitionsResponse + */ + + /** + * Calls BatchUpdateMetastorePartitions. + * @function batchUpdateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest} request BatchUpdateMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and BatchUpdateMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.batchUpdateMetastorePartitions = function batchUpdateMetastorePartitions(request, callback) { + return this.rpcCall(batchUpdateMetastorePartitions, $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse, request, callback); + }, "name", { value: "BatchUpdateMetastorePartitions" }); + + /** + * Calls BatchUpdateMetastorePartitions. 
+ * @function batchUpdateMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest} request BatchUpdateMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|listMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @typedef ListMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} [response] ListMetastorePartitionsResponse + */ + + /** + * Calls ListMetastorePartitions. + * @function listMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest} request ListMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and ListMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.listMetastorePartitions = function listMetastorePartitions(request, callback) { + return this.rpcCall(listMetastorePartitions, $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse, request, callback); + }, "name", { value: "ListMetastorePartitions" }); + + /** + * Calls ListMetastorePartitions. 
+ * @function listMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest} request ListMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionService|streamMetastorePartitions}. + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @typedef StreamMetastorePartitionsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} [response] StreamMetastorePartitionsResponse + */ + + /** + * Calls StreamMetastorePartitions. + * @function streamMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest} request StreamMetastorePartitionsRequest message or plain object + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitionsCallback} callback Node-style callback called with the error, if any, and StreamMetastorePartitionsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(MetastorePartitionService.prototype.streamMetastorePartitions = function streamMetastorePartitions(request, callback) { + return this.rpcCall(streamMetastorePartitions, $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest, $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse, request, callback); + }, "name", { value: "StreamMetastorePartitions" }); + + /** + * Calls StreamMetastorePartitions. 
+ * @function streamMetastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionService + * @instance + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest} request StreamMetastorePartitionsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + return MetastorePartitionService; + })(); + + v1beta.CreateMetastorePartitionRequest = (function() { + + /** + * Properties of a CreateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface ICreateMetastorePartitionRequest + * @property {string|null} [parent] CreateMetastorePartitionRequest parent + * @property {google.cloud.bigquery.storage.v1beta.IMetastorePartition|null} [metastorePartition] CreateMetastorePartitionRequest metastorePartition + */ + + /** + * Constructs a new CreateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a CreateMetastorePartitionRequest. + * @implements ICreateMetastorePartitionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest=} [properties] Properties to set + */ + function CreateMetastorePartitionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * CreateMetastorePartitionRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @instance + */ + CreateMetastorePartitionRequest.prototype.parent = ""; + + /** + * CreateMetastorePartitionRequest metastorePartition. 
+ * @member {google.cloud.bigquery.storage.v1beta.IMetastorePartition|null|undefined} metastorePartition + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @instance + */ + CreateMetastorePartitionRequest.prototype.metastorePartition = null; + + /** + * Creates a new CreateMetastorePartitionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest instance + */ + CreateMetastorePartitionRequest.create = function create(properties) { + return new CreateMetastorePartitionRequest(properties); + }; + + /** + * Encodes the specified CreateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest} message CreateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateMetastorePartitionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.metastorePartition != null && Object.hasOwnProperty.call(message, "metastorePartition")) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.metastorePartition, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified CreateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.ICreateMetastorePartitionRequest} message CreateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + CreateMetastorePartitionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateMetastorePartitionRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a CreateMetastorePartitionRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + CreateMetastorePartitionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a CreateMetastorePartitionRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + CreateMetastorePartitionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.metastorePartition); + if (error) + return "metastorePartition." + error; + } + return null; + }; + + /** + * Creates a CreateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest} CreateMetastorePartitionRequest + */ + CreateMetastorePartitionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.metastorePartition != null) { + if (typeof object.metastorePartition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.metastorePartition: object expected"); + message.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.metastorePartition); + } + return message; + }; + + /** + * Creates a plain object from a CreateMetastorePartitionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest} message CreateMetastorePartitionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + CreateMetastorePartitionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.metastorePartition = null; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) + object.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.metastorePartition, options); + return object; + }; + + /** + * Converts this CreateMetastorePartitionRequest to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @instance + * @returns {Object.} JSON object + */ + CreateMetastorePartitionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for CreateMetastorePartitionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + CreateMetastorePartitionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest"; + }; + + return CreateMetastorePartitionRequest; + })(); + + 
v1beta.BatchCreateMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchCreateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchCreateMetastorePartitionsRequest + * @property {string|null} [parent] BatchCreateMetastorePartitionsRequest parent + * @property {Array.|null} [requests] BatchCreateMetastorePartitionsRequest requests + * @property {boolean|null} [skipExistingPartitions] BatchCreateMetastorePartitionsRequest skipExistingPartitions + * @property {string|null} [traceId] BatchCreateMetastorePartitionsRequest traceId + */ + + /** + * Constructs a new BatchCreateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchCreateMetastorePartitionsRequest. + * @implements IBatchCreateMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchCreateMetastorePartitionsRequest(properties) { + this.requests = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchCreateMetastorePartitionsRequest requests. + * @member {Array.} requests + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.requests = $util.emptyArray; + + /** + * BatchCreateMetastorePartitionsRequest skipExistingPartitions. 
+ * @member {boolean} skipExistingPartitions + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.skipExistingPartitions = false; + + /** + * BatchCreateMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @instance + */ + BatchCreateMetastorePartitionsRequest.prototype.traceId = ""; + + /** + * Creates a new BatchCreateMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest instance + */ + BatchCreateMetastorePartitionsRequest.create = function create(properties) { + return new BatchCreateMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.requests != null && message.requests.length) + for (var i = 0; i < message.requests.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.skipExistingPartitions != null && Object.hasOwnProperty.call(message, "skipExistingPartitions")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.skipExistingPartitions); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.requests && message.requests.length)) + message.requests = []; + message.requests.push($root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.decode(reader, reader.uint32())); + break; + } + case 3: { + message.skipExistingPartitions = reader.bool(); + break; + } + case 4: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.requests != null && message.hasOwnProperty("requests")) { + if (!Array.isArray(message.requests)) + return "requests: array expected"; + for (var i = 0; i < message.requests.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.verify(message.requests[i]); + if (error) + return "requests." + error; + } + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + if (typeof message.skipExistingPartitions !== "boolean") + return "skipExistingPartitions: boolean expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates a BatchCreateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest} BatchCreateMetastorePartitionsRequest + */ + BatchCreateMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.requests) { + if (!Array.isArray(object.requests)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.requests: array expected"); + message.requests = []; + for (var i = 0; i < object.requests.length; ++i) { + if (typeof object.requests[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest.requests: object expected"); + message.requests[i] = $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.fromObject(object.requests[i]); + } + } + if (object.skipExistingPartitions != null) + message.skipExistingPartitions = Boolean(object.skipExistingPartitions); + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest} message BatchCreateMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.requests = []; + if (options.defaults) { + object.parent = ""; + object.skipExistingPartitions = false; + object.traceId = ""; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.requests && message.requests.length) { + object.requests = []; + for (var j = 0; j < message.requests.length; ++j) + object.requests[j] = $root.google.cloud.bigquery.storage.v1beta.CreateMetastorePartitionRequest.toObject(message.requests[j], options); + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + object.skipExistingPartitions = message.skipExistingPartitions; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this BatchCreateMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchCreateMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest"; + }; + + return BatchCreateMetastorePartitionsRequest; + })(); + + v1beta.BatchCreateMetastorePartitionsResponse = (function() { + + /** + * Properties of a BatchCreateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchCreateMetastorePartitionsResponse + * @property {Array.|null} [partitions] BatchCreateMetastorePartitionsResponse partitions + */ + + /** + * Constructs a new BatchCreateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchCreateMetastorePartitionsResponse. 
+ * @implements IBatchCreateMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse=} [properties] Properties to set + */ + function BatchCreateMetastorePartitionsResponse(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchCreateMetastorePartitionsResponse partitions. + * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @instance + */ + BatchCreateMetastorePartitionsResponse.prototype.partitions = $util.emptyArray; + + /** + * Creates a new BatchCreateMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse instance + */ + BatchCreateMetastorePartitionsResponse.create = function create(properties) { + return new BatchCreateMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchCreateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchCreateMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsResponse.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchCreateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchCreateMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchCreateMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchCreateMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a BatchCreateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} BatchCreateMetastorePartitionsResponse + */ + BatchCreateMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchCreateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse} message BatchCreateMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchCreateMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this BatchCreateMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchCreateMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchCreateMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchCreateMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse"; + }; + + return BatchCreateMetastorePartitionsResponse; + })(); + + 
v1beta.BatchDeleteMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchDeleteMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchDeleteMetastorePartitionsRequest + * @property {string|null} [parent] BatchDeleteMetastorePartitionsRequest parent + * @property {Array.|null} [partitionValues] BatchDeleteMetastorePartitionsRequest partitionValues + * @property {string|null} [traceId] BatchDeleteMetastorePartitionsRequest traceId + */ + + /** + * Constructs a new BatchDeleteMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchDeleteMetastorePartitionsRequest. + * @implements IBatchDeleteMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchDeleteMetastorePartitionsRequest(properties) { + this.partitionValues = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchDeleteMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchDeleteMetastorePartitionsRequest partitionValues. + * @member {Array.} partitionValues + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.partitionValues = $util.emptyArray; + + /** + * BatchDeleteMetastorePartitionsRequest traceId. 
+ * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @instance + */ + BatchDeleteMetastorePartitionsRequest.prototype.traceId = ""; + + /** + * Creates a new BatchDeleteMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest instance + */ + BatchDeleteMetastorePartitionsRequest.create = function create(properties) { + return new BatchDeleteMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchDeleteMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.partitionValues != null && message.partitionValues.length) + for (var i = 0; i < message.partitionValues.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.encode(message.partitionValues[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified BatchDeleteMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchDeleteMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchDeleteMetastorePartitionsRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.partitionValues && message.partitionValues.length)) + message.partitionValues = []; + message.partitionValues.push($root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.decode(reader, reader.uint32())); + break; + } + case 4: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchDeleteMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchDeleteMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchDeleteMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchDeleteMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.partitionValues != null && message.hasOwnProperty("partitionValues")) { + if (!Array.isArray(message.partitionValues)) + return "partitionValues: array expected"; + for (var i = 0; i < message.partitionValues.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.verify(message.partitionValues[i]); + if (error) + return "partitionValues." + error; + } + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates a BatchDeleteMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest} BatchDeleteMetastorePartitionsRequest + */ + BatchDeleteMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.partitionValues) { + if (!Array.isArray(object.partitionValues)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.partitionValues: array expected"); + message.partitionValues = []; + for (var i = 0; i < object.partitionValues.length; ++i) { + if (typeof object.partitionValues[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest.partitionValues: object expected"); + message.partitionValues[i] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.fromObject(object.partitionValues[i]); + } + } + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from a BatchDeleteMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest} message BatchDeleteMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchDeleteMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitionValues = []; + if (options.defaults) { + object.parent = ""; + object.traceId = ""; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.partitionValues && message.partitionValues.length) { + object.partitionValues = []; + for (var j = 0; j < message.partitionValues.length; ++j) + object.partitionValues[j] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.toObject(message.partitionValues[j], options); + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this BatchDeleteMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchDeleteMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchDeleteMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchDeleteMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest"; + }; + + return BatchDeleteMetastorePartitionsRequest; + })(); + + v1beta.UpdateMetastorePartitionRequest = (function() { + + /** + * Properties of an UpdateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IUpdateMetastorePartitionRequest + * @property {google.cloud.bigquery.storage.v1beta.IMetastorePartition|null} [metastorePartition] UpdateMetastorePartitionRequest metastorePartition + * @property {google.protobuf.IFieldMask|null} [updateMask] UpdateMetastorePartitionRequest updateMask + */ + + /** + * Constructs a new UpdateMetastorePartitionRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents an UpdateMetastorePartitionRequest. 
+ * @implements IUpdateMetastorePartitionRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest=} [properties] Properties to set + */ + function UpdateMetastorePartitionRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * UpdateMetastorePartitionRequest metastorePartition. + * @member {google.cloud.bigquery.storage.v1beta.IMetastorePartition|null|undefined} metastorePartition + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @instance + */ + UpdateMetastorePartitionRequest.prototype.metastorePartition = null; + + /** + * UpdateMetastorePartitionRequest updateMask. + * @member {google.protobuf.IFieldMask|null|undefined} updateMask + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @instance + */ + UpdateMetastorePartitionRequest.prototype.updateMask = null; + + /** + * Creates a new UpdateMetastorePartitionRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest instance + */ + UpdateMetastorePartitionRequest.create = function create(properties) { + return new UpdateMetastorePartitionRequest(properties); + }; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UpdateMetastorePartitionRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.metastorePartition != null && Object.hasOwnProperty.call(message, "metastorePartition")) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.metastorePartition, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.updateMask != null && Object.hasOwnProperty.call(message, "updateMask")) + $root.google.protobuf.FieldMask.encode(message.updateMask, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified UpdateMetastorePartitionRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IUpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + UpdateMetastorePartitionRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UpdateMetastorePartitionRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32()); + break; + } + case 2: { + message.updateMask = $root.google.protobuf.FieldMask.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an UpdateMetastorePartitionRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + UpdateMetastorePartitionRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an UpdateMetastorePartitionRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + UpdateMetastorePartitionRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.metastorePartition); + if (error) + return "metastorePartition." + error; + } + if (message.updateMask != null && message.hasOwnProperty("updateMask")) { + var error = $root.google.protobuf.FieldMask.verify(message.updateMask); + if (error) + return "updateMask." + error; + } + return null; + }; + + /** + * Creates an UpdateMetastorePartitionRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest} UpdateMetastorePartitionRequest + */ + UpdateMetastorePartitionRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest(); + if (object.metastorePartition != null) { + if (typeof object.metastorePartition !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.metastorePartition: object expected"); + message.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.metastorePartition); + } + if (object.updateMask != null) { + if (typeof object.updateMask !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.updateMask: object expected"); + message.updateMask = $root.google.protobuf.FieldMask.fromObject(object.updateMask); + } + return message; + }; + + /** + * Creates a plain object from an UpdateMetastorePartitionRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest} message UpdateMetastorePartitionRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + UpdateMetastorePartitionRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.metastorePartition = null; + object.updateMask = null; + } + if (message.metastorePartition != null && message.hasOwnProperty("metastorePartition")) + object.metastorePartition = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.metastorePartition, options); + if (message.updateMask != null && message.hasOwnProperty("updateMask")) + object.updateMask = $root.google.protobuf.FieldMask.toObject(message.updateMask, options); + return object; + }; + + /** + * Converts this UpdateMetastorePartitionRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @instance + * @returns {Object.} JSON object + */ + UpdateMetastorePartitionRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for UpdateMetastorePartitionRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + UpdateMetastorePartitionRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest"; + }; + + return UpdateMetastorePartitionRequest; + })(); + + v1beta.BatchUpdateMetastorePartitionsRequest = (function() { + + /** + * Properties of a BatchUpdateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchUpdateMetastorePartitionsRequest + * @property {string|null} [parent] BatchUpdateMetastorePartitionsRequest parent + * @property {Array.|null} [requests] BatchUpdateMetastorePartitionsRequest requests + * @property {string|null} [traceId] BatchUpdateMetastorePartitionsRequest traceId + */ + + /** + * Constructs a new BatchUpdateMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchUpdateMetastorePartitionsRequest. 
+ * @implements IBatchUpdateMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest=} [properties] Properties to set + */ + function BatchUpdateMetastorePartitionsRequest(properties) { + this.requests = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchUpdateMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.parent = ""; + + /** + * BatchUpdateMetastorePartitionsRequest requests. + * @member {Array.} requests + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.requests = $util.emptyArray; + + /** + * BatchUpdateMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @instance + */ + BatchUpdateMetastorePartitionsRequest.prototype.traceId = ""; + + /** + * Creates a new BatchUpdateMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest instance + */ + BatchUpdateMetastorePartitionsRequest.create = function create(properties) { + return new BatchUpdateMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.requests != null && message.requests.length) + for (var i = 0; i < message.requests.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.encode(message.requests[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.requests && message.requests.length)) + message.requests = []; + message.requests.push($root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.decode(reader, reader.uint32())); + break; + } + case 4: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchUpdateMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchUpdateMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.requests != null && message.hasOwnProperty("requests")) { + if (!Array.isArray(message.requests)) + return "requests: array expected"; + for (var i = 0; i < message.requests.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.verify(message.requests[i]); + if (error) + return "requests." + error; + } + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates a BatchUpdateMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest} BatchUpdateMetastorePartitionsRequest + */ + BatchUpdateMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.requests) { + if (!Array.isArray(object.requests)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.requests: array expected"); + message.requests = []; + for (var i = 0; i < object.requests.length; ++i) { + if (typeof object.requests[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest.requests: object expected"); + message.requests[i] = $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.fromObject(object.requests[i]); + } + } + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest} message BatchUpdateMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchUpdateMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.requests = []; + if (options.defaults) { + object.parent = ""; + object.traceId = ""; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.requests && message.requests.length) { + object.requests = []; + for (var j = 0; j < message.requests.length; ++j) + object.requests[j] = $root.google.cloud.bigquery.storage.v1beta.UpdateMetastorePartitionRequest.toObject(message.requests[j], options); + } + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this BatchUpdateMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + BatchUpdateMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchUpdateMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest"; + }; + + return BatchUpdateMetastorePartitionsRequest; + })(); + + v1beta.BatchUpdateMetastorePartitionsResponse = (function() { + + /** + * Properties of a BatchUpdateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchUpdateMetastorePartitionsResponse + * @property {Array.|null} [partitions] BatchUpdateMetastorePartitionsResponse partitions + */ + + /** + * Constructs a new BatchUpdateMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchUpdateMetastorePartitionsResponse. 
+ * @implements IBatchUpdateMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse=} [properties] Properties to set + */ + function BatchUpdateMetastorePartitionsResponse(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchUpdateMetastorePartitionsResponse partitions. + * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @instance + */ + BatchUpdateMetastorePartitionsResponse.prototype.partitions = $util.emptyArray; + + /** + * Creates a new BatchUpdateMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse instance + */ + BatchUpdateMetastorePartitionsResponse.create = function create(properties) { + return new BatchUpdateMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified BatchUpdateMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchUpdateMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsResponse.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchUpdateMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchUpdateMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchUpdateMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchUpdateMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a BatchUpdateMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} BatchUpdateMetastorePartitionsResponse + */ + BatchUpdateMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a BatchUpdateMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse} message BatchUpdateMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchUpdateMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this BatchUpdateMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + BatchUpdateMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchUpdateMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchUpdateMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse"; + }; + + return BatchUpdateMetastorePartitionsResponse; + })(); + + 
v1beta.ListMetastorePartitionsRequest = (function() { + + /** + * Properties of a ListMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IListMetastorePartitionsRequest + * @property {string|null} [parent] ListMetastorePartitionsRequest parent + * @property {string|null} [filter] ListMetastorePartitionsRequest filter + * @property {string|null} [traceId] ListMetastorePartitionsRequest traceId + */ + + /** + * Constructs a new ListMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a ListMetastorePartitionsRequest. + * @implements IListMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest=} [properties] Properties to set + */ + function ListMetastorePartitionsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.parent = ""; + + /** + * ListMetastorePartitionsRequest filter. + * @member {string} filter + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.filter = ""; + + /** + * ListMetastorePartitionsRequest traceId. + * @member {string} traceId + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @instance + */ + ListMetastorePartitionsRequest.prototype.traceId = ""; + + /** + * Creates a new ListMetastorePartitionsRequest instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest instance + */ + ListMetastorePartitionsRequest.create = function create(properties) { + return new ListMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified ListMetastorePartitionsRequest message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest} message ListMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.filter != null && Object.hasOwnProperty.call(message, "filter")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.filter); + if (message.traceId != null && Object.hasOwnProperty.call(message, "traceId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.traceId); + return writer; + }; + + /** + * Encodes the specified ListMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest} message ListMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + message.filter = reader.string(); + break; + } + case 3: { + message.traceId = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListMetastorePartitionsRequest message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListMetastorePartitionsRequest message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.filter != null && message.hasOwnProperty("filter")) + if (!$util.isString(message.filter)) + return "filter: string expected"; + if (message.traceId != null && message.hasOwnProperty("traceId")) + if (!$util.isString(message.traceId)) + return "traceId: string expected"; + return null; + }; + + /** + * Creates a ListMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest} ListMetastorePartitionsRequest + */ + ListMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.filter != null) + message.filter = String(object.filter); + if (object.traceId != null) + message.traceId = String(object.traceId); + return message; + }; + + /** + * Creates a plain object from a ListMetastorePartitionsRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest} message ListMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.parent = ""; + object.filter = ""; + object.traceId = ""; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.filter != null && message.hasOwnProperty("filter")) + object.filter = message.filter; + if (message.traceId != null && message.hasOwnProperty("traceId")) + object.traceId = message.traceId; + return object; + }; + + /** + * Converts this ListMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + ListMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest"; + }; + + return ListMetastorePartitionsRequest; + })(); + + v1beta.ListMetastorePartitionsResponse = (function() { + + /** + * Properties of a ListMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IListMetastorePartitionsResponse + * @property {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList|null} [partitions] ListMetastorePartitionsResponse partitions + * @property {google.cloud.bigquery.storage.v1beta.IStreamList|null} [streams] ListMetastorePartitionsResponse streams + */ + + /** + * Constructs a new ListMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a ListMetastorePartitionsResponse. 
+ * @implements IListMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse=} [properties] Properties to set + */ + function ListMetastorePartitionsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ListMetastorePartitionsResponse partitions. + * @member {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList|null|undefined} partitions + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @instance + */ + ListMetastorePartitionsResponse.prototype.partitions = null; + + /** + * ListMetastorePartitionsResponse streams. + * @member {google.cloud.bigquery.storage.v1beta.IStreamList|null|undefined} streams + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @instance + */ + ListMetastorePartitionsResponse.prototype.streams = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ListMetastorePartitionsResponse response. + * @member {"partitions"|"streams"|undefined} response + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @instance + */ + Object.defineProperty(ListMetastorePartitionsResponse.prototype, "response", { + get: $util.oneOfGetter($oneOfFields = ["partitions", "streams"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ListMetastorePartitionsResponse instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse instance + */ + ListMetastorePartitionsResponse.create = function create(properties) { + return new ListMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified ListMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse} message ListMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && Object.hasOwnProperty.call(message, "partitions")) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList.encode(message.partitions, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.streams != null && Object.hasOwnProperty.call(message, "streams")) + $root.google.cloud.bigquery.storage.v1beta.StreamList.encode(message.streams, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ListMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse} message ListMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ListMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsResponse.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.partitions = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList.decode(reader, reader.uint32()); + break; + } + case 2: { + message.streams = $root.google.cloud.bigquery.storage.v1beta.StreamList.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ListMetastorePartitionsResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ListMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ListMetastorePartitionsResponse message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ListMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList.verify(message.partitions); + if (error) + return "partitions." + error; + } + } + if (message.streams != null && message.hasOwnProperty("streams")) { + if (properties.response === 1) + return "response: multiple values"; + properties.response = 1; + { + var error = $root.google.cloud.bigquery.storage.v1beta.StreamList.verify(message.streams); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a ListMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} ListMetastorePartitionsResponse + */ + ListMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse(); + if (object.partitions != null) { + if (typeof object.partitions !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.partitions: object expected"); + message.partitions = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList.fromObject(object.partitions); + } + if (object.streams != null) { + if (typeof object.streams !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse.streams: object expected"); + message.streams = $root.google.cloud.bigquery.storage.v1beta.StreamList.fromObject(object.streams); + } + return message; + }; + + /** + * Creates a plain object from a ListMetastorePartitionsResponse message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse} message ListMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ListMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + object.partitions = $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList.toObject(message.partitions, options); + if (options.oneofs) + object.response = "partitions"; + } + if (message.streams != null && message.hasOwnProperty("streams")) { + object.streams = $root.google.cloud.bigquery.storage.v1beta.StreamList.toObject(message.streams, options); + if (options.oneofs) + object.response = "streams"; + } + return object; + }; + + /** + * Converts this ListMetastorePartitionsResponse to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + ListMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ListMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ListMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse"; + }; + + return ListMetastorePartitionsResponse; + })(); + + v1beta.StreamMetastorePartitionsRequest = (function() { + + /** + * Properties of a StreamMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IStreamMetastorePartitionsRequest + * @property {string|null} [parent] StreamMetastorePartitionsRequest parent + * @property {Array.|null} [metastorePartitions] StreamMetastorePartitionsRequest metastorePartitions + * @property {boolean|null} [skipExistingPartitions] StreamMetastorePartitionsRequest skipExistingPartitions + */ + + /** + * Constructs a new StreamMetastorePartitionsRequest. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a StreamMetastorePartitionsRequest. 
+ * @implements IStreamMetastorePartitionsRequest + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest=} [properties] Properties to set + */ + function StreamMetastorePartitionsRequest(properties) { + this.metastorePartitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamMetastorePartitionsRequest parent. + * @member {string} parent + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.parent = ""; + + /** + * StreamMetastorePartitionsRequest metastorePartitions. + * @member {Array.} metastorePartitions + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.metastorePartitions = $util.emptyArray; + + /** + * StreamMetastorePartitionsRequest skipExistingPartitions. + * @member {boolean} skipExistingPartitions + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @instance + */ + StreamMetastorePartitionsRequest.prototype.skipExistingPartitions = false; + + /** + * Creates a new StreamMetastorePartitionsRequest instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest instance + */ + StreamMetastorePartitionsRequest.create = function create(properties) { + return new StreamMetastorePartitionsRequest(properties); + }; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); + if (message.metastorePartitions != null && message.metastorePartitions.length) + for (var i = 0; i < message.metastorePartitions.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.metastorePartitions[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.skipExistingPartitions != null && Object.hasOwnProperty.call(message, "skipExistingPartitions")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.skipExistingPartitions); + return writer; + }; + + /** + * Encodes the specified StreamMetastorePartitionsRequest message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.parent = reader.string(); + break; + } + case 2: { + if (!(message.metastorePartitions && message.metastorePartitions.length)) + message.metastorePartitions = []; + message.metastorePartitions.push($root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32())); + break; + } + case 3: { + message.skipExistingPartitions = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamMetastorePartitionsRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamMetastorePartitionsRequest message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamMetastorePartitionsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.parent != null && message.hasOwnProperty("parent")) + if (!$util.isString(message.parent)) + return "parent: string expected"; + if (message.metastorePartitions != null && message.hasOwnProperty("metastorePartitions")) { + if (!Array.isArray(message.metastorePartitions)) + return "metastorePartitions: array expected"; + for (var i = 0; i < message.metastorePartitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.metastorePartitions[i]); + if (error) + return "metastorePartitions." + error; + } + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + if (typeof message.skipExistingPartitions !== "boolean") + return "skipExistingPartitions: boolean expected"; + return null; + }; + + /** + * Creates a StreamMetastorePartitionsRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest} StreamMetastorePartitionsRequest + */ + StreamMetastorePartitionsRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest(); + if (object.parent != null) + message.parent = String(object.parent); + if (object.metastorePartitions) { + if (!Array.isArray(object.metastorePartitions)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.metastorePartitions: array expected"); + message.metastorePartitions = []; + for (var i = 0; i < object.metastorePartitions.length; ++i) { + if (typeof object.metastorePartitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest.metastorePartitions: object expected"); + message.metastorePartitions[i] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.metastorePartitions[i]); + } + } + if (object.skipExistingPartitions != null) + message.skipExistingPartitions = Boolean(object.skipExistingPartitions); + return message; + }; + + /** + * Creates a plain object from a StreamMetastorePartitionsRequest message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest} message StreamMetastorePartitionsRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamMetastorePartitionsRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.metastorePartitions = []; + if (options.defaults) { + object.parent = ""; + object.skipExistingPartitions = false; + } + if (message.parent != null && message.hasOwnProperty("parent")) + object.parent = message.parent; + if (message.metastorePartitions && message.metastorePartitions.length) { + object.metastorePartitions = []; + for (var j = 0; j < message.metastorePartitions.length; ++j) + object.metastorePartitions[j] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.metastorePartitions[j], options); + } + if (message.skipExistingPartitions != null && message.hasOwnProperty("skipExistingPartitions")) + object.skipExistingPartitions = message.skipExistingPartitions; + return object; + }; + + /** + * Converts this StreamMetastorePartitionsRequest to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @instance + * @returns {Object.} JSON object + */ + StreamMetastorePartitionsRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamMetastorePartitionsRequest + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamMetastorePartitionsRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest"; + }; + + return StreamMetastorePartitionsRequest; + })(); + + v1beta.StreamMetastorePartitionsResponse = (function() { + + /** + * Properties of a StreamMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IStreamMetastorePartitionsResponse + * @property {number|Long|null} [totalPartitionsStreamedCount] StreamMetastorePartitionsResponse totalPartitionsStreamedCount + * @property {number|Long|null} [totalPartitionsInsertedCount] StreamMetastorePartitionsResponse totalPartitionsInsertedCount + */ + + /** + * Constructs a new StreamMetastorePartitionsResponse. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a StreamMetastorePartitionsResponse. 
+ * @implements IStreamMetastorePartitionsResponse + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse=} [properties] Properties to set + */ + function StreamMetastorePartitionsResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamMetastorePartitionsResponse totalPartitionsStreamedCount. + * @member {number|Long} totalPartitionsStreamedCount + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @instance + */ + StreamMetastorePartitionsResponse.prototype.totalPartitionsStreamedCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * StreamMetastorePartitionsResponse totalPartitionsInsertedCount. + * @member {number|Long} totalPartitionsInsertedCount + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @instance + */ + StreamMetastorePartitionsResponse.prototype.totalPartitionsInsertedCount = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new StreamMetastorePartitionsResponse instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse instance + */ + StreamMetastorePartitionsResponse.create = function create(properties) { + return new StreamMetastorePartitionsResponse(properties); + }; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.totalPartitionsStreamedCount != null && Object.hasOwnProperty.call(message, "totalPartitionsStreamedCount")) + writer.uint32(/* id 2, wireType 0 =*/16).int64(message.totalPartitionsStreamedCount); + if (message.totalPartitionsInsertedCount != null && Object.hasOwnProperty.call(message, "totalPartitionsInsertedCount")) + writer.uint32(/* id 3, wireType 0 =*/24).int64(message.totalPartitionsInsertedCount); + return writer; + }; + + /** + * Encodes the specified StreamMetastorePartitionsResponse message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamMetastorePartitionsResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsResponse.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 2: { + message.totalPartitionsStreamedCount = reader.int64(); + break; + } + case 3: { + message.totalPartitionsInsertedCount = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamMetastorePartitionsResponse message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamMetastorePartitionsResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamMetastorePartitionsResponse message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamMetastorePartitionsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.totalPartitionsStreamedCount != null && message.hasOwnProperty("totalPartitionsStreamedCount")) + if (!$util.isInteger(message.totalPartitionsStreamedCount) && !(message.totalPartitionsStreamedCount && $util.isInteger(message.totalPartitionsStreamedCount.low) && $util.isInteger(message.totalPartitionsStreamedCount.high))) + return "totalPartitionsStreamedCount: integer|Long expected"; + if (message.totalPartitionsInsertedCount != null && message.hasOwnProperty("totalPartitionsInsertedCount")) + if (!$util.isInteger(message.totalPartitionsInsertedCount) && !(message.totalPartitionsInsertedCount && $util.isInteger(message.totalPartitionsInsertedCount.low) && $util.isInteger(message.totalPartitionsInsertedCount.high))) + return "totalPartitionsInsertedCount: integer|Long expected"; + return null; + }; + + /** + * 
Creates a StreamMetastorePartitionsResponse message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} StreamMetastorePartitionsResponse + */ + StreamMetastorePartitionsResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse(); + if (object.totalPartitionsStreamedCount != null) + if ($util.Long) + (message.totalPartitionsStreamedCount = $util.Long.fromValue(object.totalPartitionsStreamedCount)).unsigned = false; + else if (typeof object.totalPartitionsStreamedCount === "string") + message.totalPartitionsStreamedCount = parseInt(object.totalPartitionsStreamedCount, 10); + else if (typeof object.totalPartitionsStreamedCount === "number") + message.totalPartitionsStreamedCount = object.totalPartitionsStreamedCount; + else if (typeof object.totalPartitionsStreamedCount === "object") + message.totalPartitionsStreamedCount = new $util.LongBits(object.totalPartitionsStreamedCount.low >>> 0, object.totalPartitionsStreamedCount.high >>> 0).toNumber(); + if (object.totalPartitionsInsertedCount != null) + if ($util.Long) + (message.totalPartitionsInsertedCount = $util.Long.fromValue(object.totalPartitionsInsertedCount)).unsigned = false; + else if (typeof object.totalPartitionsInsertedCount === "string") + message.totalPartitionsInsertedCount = parseInt(object.totalPartitionsInsertedCount, 10); + else if (typeof object.totalPartitionsInsertedCount === "number") + message.totalPartitionsInsertedCount = object.totalPartitionsInsertedCount; + else if (typeof object.totalPartitionsInsertedCount === 
"object") + message.totalPartitionsInsertedCount = new $util.LongBits(object.totalPartitionsInsertedCount.low >>> 0, object.totalPartitionsInsertedCount.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a StreamMetastorePartitionsResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse} message StreamMetastorePartitionsResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamMetastorePartitionsResponse.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.totalPartitionsStreamedCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.totalPartitionsStreamedCount = options.longs === String ? "0" : 0; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.totalPartitionsInsertedCount = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.totalPartitionsInsertedCount = options.longs === String ? "0" : 0; + } + if (message.totalPartitionsStreamedCount != null && message.hasOwnProperty("totalPartitionsStreamedCount")) + if (typeof message.totalPartitionsStreamedCount === "number") + object.totalPartitionsStreamedCount = options.longs === String ? String(message.totalPartitionsStreamedCount) : message.totalPartitionsStreamedCount; + else + object.totalPartitionsStreamedCount = options.longs === String ? $util.Long.prototype.toString.call(message.totalPartitionsStreamedCount) : options.longs === Number ? 
new $util.LongBits(message.totalPartitionsStreamedCount.low >>> 0, message.totalPartitionsStreamedCount.high >>> 0).toNumber() : message.totalPartitionsStreamedCount; + if (message.totalPartitionsInsertedCount != null && message.hasOwnProperty("totalPartitionsInsertedCount")) + if (typeof message.totalPartitionsInsertedCount === "number") + object.totalPartitionsInsertedCount = options.longs === String ? String(message.totalPartitionsInsertedCount) : message.totalPartitionsInsertedCount; + else + object.totalPartitionsInsertedCount = options.longs === String ? $util.Long.prototype.toString.call(message.totalPartitionsInsertedCount) : options.longs === Number ? new $util.LongBits(message.totalPartitionsInsertedCount.low >>> 0, message.totalPartitionsInsertedCount.high >>> 0).toNumber() : message.totalPartitionsInsertedCount; + return object; + }; + + /** + * Converts this StreamMetastorePartitionsResponse to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @instance + * @returns {Object.} JSON object + */ + StreamMetastorePartitionsResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamMetastorePartitionsResponse + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamMetastorePartitionsResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse"; + }; + + return StreamMetastorePartitionsResponse; + })(); + + v1beta.BatchSizeTooLargeError = (function() { + + /** + * Properties of a 
BatchSizeTooLargeError. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IBatchSizeTooLargeError + * @property {number|Long|null} [maxBatchSize] BatchSizeTooLargeError maxBatchSize + * @property {string|null} [errorMessage] BatchSizeTooLargeError errorMessage + */ + + /** + * Constructs a new BatchSizeTooLargeError. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a BatchSizeTooLargeError. + * @implements IBatchSizeTooLargeError + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError=} [properties] Properties to set + */ + function BatchSizeTooLargeError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * BatchSizeTooLargeError maxBatchSize. + * @member {number|Long} maxBatchSize + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @instance + */ + BatchSizeTooLargeError.prototype.maxBatchSize = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * BatchSizeTooLargeError errorMessage. + * @member {string} errorMessage + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @instance + */ + BatchSizeTooLargeError.prototype.errorMessage = ""; + + /** + * Creates a new BatchSizeTooLargeError instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError} BatchSizeTooLargeError instance + */ + BatchSizeTooLargeError.create = function create(properties) { + return new BatchSizeTooLargeError(properties); + }; + + /** + * Encodes the specified BatchSizeTooLargeError message. 
Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError} message BatchSizeTooLargeError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchSizeTooLargeError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.maxBatchSize != null && Object.hasOwnProperty.call(message, "maxBatchSize")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.maxBatchSize); + if (message.errorMessage != null && Object.hasOwnProperty.call(message, "errorMessage")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.errorMessage); + return writer; + }; + + /** + * Encodes the specified BatchSizeTooLargeError message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1beta.IBatchSizeTooLargeError} message BatchSizeTooLargeError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + BatchSizeTooLargeError.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError} BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchSizeTooLargeError.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.maxBatchSize = reader.int64(); + break; + } + case 2: { + message.errorMessage = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a BatchSizeTooLargeError message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError} BatchSizeTooLargeError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + BatchSizeTooLargeError.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a BatchSizeTooLargeError message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + BatchSizeTooLargeError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.maxBatchSize != null && message.hasOwnProperty("maxBatchSize")) + if (!$util.isInteger(message.maxBatchSize) && !(message.maxBatchSize && $util.isInteger(message.maxBatchSize.low) && $util.isInteger(message.maxBatchSize.high))) + return "maxBatchSize: integer|Long expected"; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + if (!$util.isString(message.errorMessage)) + return "errorMessage: string expected"; + return null; + }; + + /** + * Creates a BatchSizeTooLargeError message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError} BatchSizeTooLargeError + */ + BatchSizeTooLargeError.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError(); + if (object.maxBatchSize != null) + if ($util.Long) + (message.maxBatchSize = $util.Long.fromValue(object.maxBatchSize)).unsigned = false; + else if (typeof object.maxBatchSize === "string") + message.maxBatchSize = parseInt(object.maxBatchSize, 10); + else if (typeof object.maxBatchSize === "number") + message.maxBatchSize = object.maxBatchSize; + else if (typeof object.maxBatchSize === "object") + message.maxBatchSize = new $util.LongBits(object.maxBatchSize.low >>> 0, 
object.maxBatchSize.high >>> 0).toNumber(); + if (object.errorMessage != null) + message.errorMessage = String(object.errorMessage); + return message; + }; + + /** + * Creates a plain object from a BatchSizeTooLargeError message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError} message BatchSizeTooLargeError + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + BatchSizeTooLargeError.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.maxBatchSize = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.maxBatchSize = options.longs === String ? "0" : 0; + object.errorMessage = ""; + } + if (message.maxBatchSize != null && message.hasOwnProperty("maxBatchSize")) + if (typeof message.maxBatchSize === "number") + object.maxBatchSize = options.longs === String ? String(message.maxBatchSize) : message.maxBatchSize; + else + object.maxBatchSize = options.longs === String ? $util.Long.prototype.toString.call(message.maxBatchSize) : options.longs === Number ? new $util.LongBits(message.maxBatchSize.low >>> 0, message.maxBatchSize.high >>> 0).toNumber() : message.maxBatchSize; + if (message.errorMessage != null && message.hasOwnProperty("errorMessage")) + object.errorMessage = message.errorMessage; + return object; + }; + + /** + * Converts this BatchSizeTooLargeError to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @instance + * @returns {Object.} JSON object + */ + BatchSizeTooLargeError.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for BatchSizeTooLargeError + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + BatchSizeTooLargeError.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.BatchSizeTooLargeError"; + }; + + return BatchSizeTooLargeError; + })(); + + v1beta.FieldSchema = (function() { + + /** + * Properties of a FieldSchema. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IFieldSchema + * @property {string|null} [name] FieldSchema name + * @property {string|null} [type] FieldSchema type + */ + + /** + * Constructs a new FieldSchema. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a FieldSchema. + * @implements IFieldSchema + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IFieldSchema=} [properties] Properties to set + */ + function FieldSchema(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FieldSchema name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @instance + */ + FieldSchema.prototype.name = ""; + + /** + * FieldSchema type. 
+ * @member {string} type + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @instance + */ + FieldSchema.prototype.type = ""; + + /** + * Creates a new FieldSchema instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta.IFieldSchema=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.FieldSchema} FieldSchema instance + */ + FieldSchema.create = function create(properties) { + return new FieldSchema(properties); + }; + + /** + * Encodes the specified FieldSchema message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.FieldSchema.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta.IFieldSchema} message FieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldSchema.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.type); + return writer; + }; + + /** + * Encodes the specified FieldSchema message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.FieldSchema.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta.IFieldSchema} message FieldSchema message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FieldSchema.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FieldSchema message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.FieldSchema} FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldSchema.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.FieldSchema(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FieldSchema message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.FieldSchema} FieldSchema + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FieldSchema.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FieldSchema message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FieldSchema.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) + if (!$util.isString(message.type)) + return "type: string expected"; + return null; + }; + + /** + * Creates a FieldSchema message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.FieldSchema} FieldSchema + */ + FieldSchema.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.FieldSchema) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.FieldSchema(); + if (object.name != null) + message.name = String(object.name); + if (object.type != null) + message.type = String(object.type); + return message; + }; + + /** + * Creates a plain object from a FieldSchema message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {google.cloud.bigquery.storage.v1beta.FieldSchema} message FieldSchema + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FieldSchema.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.type = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = message.type; + return object; + }; + + /** + * Converts this FieldSchema to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @instance + * @returns {Object.} JSON object + */ + FieldSchema.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FieldSchema + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.FieldSchema + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FieldSchema.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.FieldSchema"; + }; + + return FieldSchema; + })(); + + v1beta.StorageDescriptor = (function() { + + /** + * Properties of a StorageDescriptor. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IStorageDescriptor + * @property {string|null} [locationUri] StorageDescriptor locationUri + * @property {string|null} [inputFormat] StorageDescriptor inputFormat + * @property {string|null} [outputFormat] StorageDescriptor outputFormat + * @property {google.cloud.bigquery.storage.v1beta.ISerDeInfo|null} [serdeInfo] StorageDescriptor serdeInfo + */ + + /** + * Constructs a new StorageDescriptor. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a StorageDescriptor. + * @implements IStorageDescriptor + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IStorageDescriptor=} [properties] Properties to set + */ + function StorageDescriptor(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StorageDescriptor locationUri. 
+ * @member {string} locationUri + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.locationUri = ""; + + /** + * StorageDescriptor inputFormat. + * @member {string} inputFormat + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.inputFormat = ""; + + /** + * StorageDescriptor outputFormat. + * @member {string} outputFormat + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.outputFormat = ""; + + /** + * StorageDescriptor serdeInfo. + * @member {google.cloud.bigquery.storage.v1beta.ISerDeInfo|null|undefined} serdeInfo + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @instance + */ + StorageDescriptor.prototype.serdeInfo = null; + + /** + * Creates a new StorageDescriptor instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStorageDescriptor=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.StorageDescriptor} StorageDescriptor instance + */ + StorageDescriptor.create = function create(properties) { + return new StorageDescriptor(properties); + }; + + /** + * Encodes the specified StorageDescriptor message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StorageDescriptor.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStorageDescriptor} message StorageDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageDescriptor.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.locationUri != null && Object.hasOwnProperty.call(message, "locationUri")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.locationUri); + if (message.inputFormat != null && Object.hasOwnProperty.call(message, "inputFormat")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputFormat); + if (message.outputFormat != null && Object.hasOwnProperty.call(message, "outputFormat")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputFormat); + if (message.serdeInfo != null && Object.hasOwnProperty.call(message, "serdeInfo")) + $root.google.cloud.bigquery.storage.v1beta.SerDeInfo.encode(message.serdeInfo, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StorageDescriptor message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StorageDescriptor.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStorageDescriptor} message StorageDescriptor message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StorageDescriptor.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.StorageDescriptor} StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageDescriptor.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.locationUri = reader.string(); + break; + } + case 2: { + message.inputFormat = reader.string(); + break; + } + case 3: { + message.outputFormat = reader.string(); + break; + } + case 4: { + message.serdeInfo = $root.google.cloud.bigquery.storage.v1beta.SerDeInfo.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StorageDescriptor message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.StorageDescriptor} StorageDescriptor + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StorageDescriptor.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StorageDescriptor message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StorageDescriptor.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.locationUri != null && message.hasOwnProperty("locationUri")) + if (!$util.isString(message.locationUri)) + return "locationUri: string expected"; + if (message.inputFormat != null && message.hasOwnProperty("inputFormat")) + if (!$util.isString(message.inputFormat)) + return "inputFormat: string expected"; + if (message.outputFormat != null && message.hasOwnProperty("outputFormat")) + if (!$util.isString(message.outputFormat)) + return "outputFormat: string expected"; + if (message.serdeInfo != null && message.hasOwnProperty("serdeInfo")) { + var error = $root.google.cloud.bigquery.storage.v1beta.SerDeInfo.verify(message.serdeInfo); + if (error) + return "serdeInfo." + error; + } + return null; + }; + + /** + * Creates a StorageDescriptor message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.StorageDescriptor} StorageDescriptor + */ + StorageDescriptor.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor(); + if (object.locationUri != null) + message.locationUri = String(object.locationUri); + if (object.inputFormat != null) + message.inputFormat = String(object.inputFormat); + if (object.outputFormat != null) + message.outputFormat = String(object.outputFormat); + if (object.serdeInfo != null) { + if (typeof object.serdeInfo !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.StorageDescriptor.serdeInfo: object expected"); + message.serdeInfo = $root.google.cloud.bigquery.storage.v1beta.SerDeInfo.fromObject(object.serdeInfo); + } + return message; + }; + + /** + * Creates a plain object from a StorageDescriptor message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {google.cloud.bigquery.storage.v1beta.StorageDescriptor} message StorageDescriptor + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StorageDescriptor.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.locationUri = ""; + object.inputFormat = ""; + object.outputFormat = ""; + object.serdeInfo = null; + } + if (message.locationUri != null && message.hasOwnProperty("locationUri")) + object.locationUri = message.locationUri; + if (message.inputFormat != null && message.hasOwnProperty("inputFormat")) + object.inputFormat = message.inputFormat; + if (message.outputFormat != null && message.hasOwnProperty("outputFormat")) + object.outputFormat = message.outputFormat; + if (message.serdeInfo != null && message.hasOwnProperty("serdeInfo")) + object.serdeInfo = $root.google.cloud.bigquery.storage.v1beta.SerDeInfo.toObject(message.serdeInfo, options); + return object; + }; + + /** + * Converts this StorageDescriptor to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @instance + * @returns {Object.} JSON object + */ + StorageDescriptor.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StorageDescriptor + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.StorageDescriptor + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StorageDescriptor.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.StorageDescriptor"; + }; + + return StorageDescriptor; + })(); + + v1beta.SerDeInfo = (function() { + + /** + * Properties of a SerDeInfo. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface ISerDeInfo + * @property {string|null} [name] SerDeInfo name + * @property {string|null} [serializationLibrary] SerDeInfo serializationLibrary + * @property {Object.|null} [parameters] SerDeInfo parameters + */ + + /** + * Constructs a new SerDeInfo. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a SerDeInfo. + * @implements ISerDeInfo + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.ISerDeInfo=} [properties] Properties to set + */ + function SerDeInfo(properties) { + this.parameters = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SerDeInfo name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @instance + */ + SerDeInfo.prototype.name = ""; + + /** + * SerDeInfo serializationLibrary. 
+ * @member {string} serializationLibrary + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @instance + */ + SerDeInfo.prototype.serializationLibrary = ""; + + /** + * SerDeInfo parameters. + * @member {Object.} parameters + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @instance + */ + SerDeInfo.prototype.parameters = $util.emptyObject; + + /** + * Creates a new SerDeInfo instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1beta.ISerDeInfo=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.SerDeInfo} SerDeInfo instance + */ + SerDeInfo.create = function create(properties) { + return new SerDeInfo(properties); + }; + + /** + * Encodes the specified SerDeInfo message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.SerDeInfo.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1beta.ISerDeInfo} message SerDeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SerDeInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.serializationLibrary != null && Object.hasOwnProperty.call(message, "serializationLibrary")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.serializationLibrary); + if (message.parameters != null && Object.hasOwnProperty.call(message, "parameters")) + for (var keys = Object.keys(message.parameters), i = 0; i < keys.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 
=*/18).string(message.parameters[keys[i]]).ldelim(); + return writer; + }; + + /** + * Encodes the specified SerDeInfo message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.SerDeInfo.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1beta.ISerDeInfo} message SerDeInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SerDeInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.SerDeInfo} SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SerDeInfo.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.SerDeInfo(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.serializationLibrary = reader.string(); + break; + } + case 3: { + if (message.parameters === $util.emptyObject) + message.parameters = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.parameters[key] = value; + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SerDeInfo message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.SerDeInfo} SerDeInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SerDeInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SerDeInfo message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SerDeInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.serializationLibrary != null && message.hasOwnProperty("serializationLibrary")) + if (!$util.isString(message.serializationLibrary)) + return "serializationLibrary: string expected"; + if (message.parameters != null && message.hasOwnProperty("parameters")) { + if (!$util.isObject(message.parameters)) + return "parameters: object expected"; + var key = Object.keys(message.parameters); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.parameters[key[i]])) + return "parameters: string{k:string} expected"; + } + return null; + }; + + /** + * Creates a SerDeInfo message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.SerDeInfo} SerDeInfo + */ + SerDeInfo.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.SerDeInfo) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.SerDeInfo(); + if (object.name != null) + message.name = String(object.name); + if (object.serializationLibrary != null) + message.serializationLibrary = String(object.serializationLibrary); + if (object.parameters) { + if (typeof object.parameters !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.SerDeInfo.parameters: object expected"); + message.parameters = {}; + for (var keys = Object.keys(object.parameters), i = 0; i < keys.length; ++i) + message.parameters[keys[i]] = String(object.parameters[keys[i]]); + } + return message; + }; + + /** + * Creates a plain object from a SerDeInfo message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {google.cloud.bigquery.storage.v1beta.SerDeInfo} message SerDeInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SerDeInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.objects || options.defaults) + object.parameters = {}; + if (options.defaults) { + object.name = ""; + object.serializationLibrary = ""; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.serializationLibrary != null && message.hasOwnProperty("serializationLibrary")) + object.serializationLibrary = message.serializationLibrary; + var keys2; + if (message.parameters && (keys2 = Object.keys(message.parameters)).length) { + object.parameters = {}; + for (var j = 0; j < keys2.length; ++j) + object.parameters[keys2[j]] = message.parameters[keys2[j]]; + } + return object; + }; + + /** + * Converts this SerDeInfo to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @instance + * @returns {Object.} JSON object + */ + SerDeInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SerDeInfo + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.SerDeInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SerDeInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.SerDeInfo"; + }; + + return SerDeInfo; + })(); + + v1beta.MetastorePartition = (function() { + + /** + * Properties of a MetastorePartition. 
+ * @memberof google.cloud.bigquery.storage.v1beta + * @interface IMetastorePartition + * @property {Array.|null} [values] MetastorePartition values + * @property {google.protobuf.ITimestamp|null} [createTime] MetastorePartition createTime + * @property {google.cloud.bigquery.storage.v1beta.IStorageDescriptor|null} [storageDescriptor] MetastorePartition storageDescriptor + * @property {Object.|null} [parameters] MetastorePartition parameters + * @property {Array.|null} [fields] MetastorePartition fields + */ + + /** + * Constructs a new MetastorePartition. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a MetastorePartition. + * @implements IMetastorePartition + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartition=} [properties] Properties to set + */ + function MetastorePartition(properties) { + this.values = []; + this.parameters = {}; + this.fields = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartition values. + * @member {Array.} values + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + */ + MetastorePartition.prototype.values = $util.emptyArray; + + /** + * MetastorePartition createTime. + * @member {google.protobuf.ITimestamp|null|undefined} createTime + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + */ + MetastorePartition.prototype.createTime = null; + + /** + * MetastorePartition storageDescriptor. + * @member {google.cloud.bigquery.storage.v1beta.IStorageDescriptor|null|undefined} storageDescriptor + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + */ + MetastorePartition.prototype.storageDescriptor = null; + + /** + * MetastorePartition parameters. 
+ * @member {Object.} parameters + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + */ + MetastorePartition.prototype.parameters = $util.emptyObject; + + /** + * MetastorePartition fields. + * @member {Array.} fields + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + */ + MetastorePartition.prototype.fields = $util.emptyArray; + + /** + * Creates a new MetastorePartition instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartition=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartition} MetastorePartition instance + */ + MetastorePartition.create = function create(properties) { + return new MetastorePartition(properties); + }; + + /** + * Encodes the specified MetastorePartition message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartition.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartition} message MetastorePartition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartition.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.values[i]); + if (message.createTime != null && Object.hasOwnProperty.call(message, "createTime")) + $root.google.protobuf.Timestamp.encode(message.createTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.storageDescriptor != null && Object.hasOwnProperty.call(message, "storageDescriptor")) + $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor.encode(message.storageDescriptor, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.parameters != null && Object.hasOwnProperty.call(message, "parameters")) + for (var keys = Object.keys(message.parameters), i = 0; i < keys.length; ++i) + writer.uint32(/* id 4, wireType 2 =*/34).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.parameters[keys[i]]).ldelim(); + if (message.fields != null && message.fields.length) + for (var i = 0; i < message.fields.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.FieldSchema.encode(message.fields[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MetastorePartition message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartition.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartition} message MetastorePartition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartition.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartition} MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartition.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartition(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push(reader.string()); + break; + } + case 2: { + message.createTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 3: { + message.storageDescriptor = $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor.decode(reader, reader.uint32()); + break; + } + case 4: { + if (message.parameters === $util.emptyObject) + message.parameters = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.parameters[key] = value; + break; + } + case 5: { + if (!(message.fields && message.fields.length)) + message.fields = []; + message.fields.push($root.google.cloud.bigquery.storage.v1beta.FieldSchema.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartition message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartition} MetastorePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartition.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartition message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartition.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) + if (!$util.isString(message.values[i])) + return "values: string[] expected"; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.createTime); + if (error) + return "createTime." + error; + } + if (message.storageDescriptor != null && message.hasOwnProperty("storageDescriptor")) { + var error = $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor.verify(message.storageDescriptor); + if (error) + return "storageDescriptor." 
+ error; + } + if (message.parameters != null && message.hasOwnProperty("parameters")) { + if (!$util.isObject(message.parameters)) + return "parameters: object expected"; + var key = Object.keys(message.parameters); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.parameters[key[i]])) + return "parameters: string{k:string} expected"; + } + if (message.fields != null && message.hasOwnProperty("fields")) { + if (!Array.isArray(message.fields)) + return "fields: array expected"; + for (var i = 0; i < message.fields.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.FieldSchema.verify(message.fields[i]); + if (error) + return "fields." + error; + } + } + return null; + }; + + /** + * Creates a MetastorePartition message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartition} MetastorePartition + */ + MetastorePartition.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.MetastorePartition) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartition(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) + message.values[i] = String(object.values[i]); + } + if (object.createTime != null) { + if (typeof object.createTime !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.createTime: object expected"); + message.createTime = $root.google.protobuf.Timestamp.fromObject(object.createTime); + } + if (object.storageDescriptor != null) { + if (typeof object.storageDescriptor !== "object") 
+ throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.storageDescriptor: object expected"); + message.storageDescriptor = $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor.fromObject(object.storageDescriptor); + } + if (object.parameters) { + if (typeof object.parameters !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.parameters: object expected"); + message.parameters = {}; + for (var keys = Object.keys(object.parameters), i = 0; i < keys.length; ++i) + message.parameters[keys[i]] = String(object.parameters[keys[i]]); + } + if (object.fields) { + if (!Array.isArray(object.fields)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.fields: array expected"); + message.fields = []; + for (var i = 0; i < object.fields.length; ++i) { + if (typeof object.fields[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartition.fields: object expected"); + message.fields[i] = $root.google.cloud.bigquery.storage.v1beta.FieldSchema.fromObject(object.fields[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartition message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartition} message MetastorePartition + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartition.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.values = []; + object.fields = []; + } + if (options.objects || options.defaults) + object.parameters = {}; + if (options.defaults) { + object.createTime = null; + object.storageDescriptor = null; + } + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = message.values[j]; + } + if (message.createTime != null && message.hasOwnProperty("createTime")) + object.createTime = $root.google.protobuf.Timestamp.toObject(message.createTime, options); + if (message.storageDescriptor != null && message.hasOwnProperty("storageDescriptor")) + object.storageDescriptor = $root.google.cloud.bigquery.storage.v1beta.StorageDescriptor.toObject(message.storageDescriptor, options); + var keys2; + if (message.parameters && (keys2 = Object.keys(message.parameters)).length) { + object.parameters = {}; + for (var j = 0; j < keys2.length; ++j) + object.parameters[keys2[j]] = message.parameters[keys2[j]]; + } + if (message.fields && message.fields.length) { + object.fields = []; + for (var j = 0; j < message.fields.length; ++j) + object.fields[j] = $root.google.cloud.bigquery.storage.v1beta.FieldSchema.toObject(message.fields[j], options); + } + return object; + }; + + /** + * Converts this MetastorePartition to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @instance + * @returns {Object.} JSON object + */ + MetastorePartition.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartition + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartition + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartition.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.MetastorePartition"; + }; + + return MetastorePartition; + })(); + + v1beta.MetastorePartitionList = (function() { + + /** + * Properties of a MetastorePartitionList. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IMetastorePartitionList + * @property {Array.|null} [partitions] MetastorePartitionList partitions + */ + + /** + * Constructs a new MetastorePartitionList. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a MetastorePartitionList. + * @implements IMetastorePartitionList + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList=} [properties] Properties to set + */ + function MetastorePartitionList(properties) { + this.partitions = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartitionList partitions. 
+ * @member {Array.} partitions + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @instance + */ + MetastorePartitionList.prototype.partitions = $util.emptyArray; + + /** + * Creates a new MetastorePartitionList instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionList} MetastorePartitionList instance + */ + MetastorePartitionList.create = function create(properties) { + return new MetastorePartitionList(properties); + }; + + /** + * Encodes the specified MetastorePartitionList message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionList.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList} message MetastorePartitionList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionList.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.partitions != null && message.partitions.length) + for (var i = 0; i < message.partitions.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.encode(message.partitions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified MetastorePartitionList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionList.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionList} message MetastorePartitionList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionList.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionList} MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionList.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.partitions && message.partitions.length)) + message.partitions = []; + message.partitions.push($root.google.cloud.bigquery.storage.v1beta.MetastorePartition.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartitionList message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionList} MetastorePartitionList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionList.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartitionList message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartitionList.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.partitions != null && message.hasOwnProperty("partitions")) { + if (!Array.isArray(message.partitions)) + return "partitions: array expected"; + for (var i = 0; i < message.partitions.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.verify(message.partitions[i]); + if (error) + return "partitions." + error; + } + } + return null; + }; + + /** + * Creates a MetastorePartitionList message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionList} MetastorePartitionList + */ + MetastorePartitionList.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionList(); + if (object.partitions) { + if (!Array.isArray(object.partitions)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartitionList.partitions: array expected"); + message.partitions = []; + for (var i = 0; i < object.partitions.length; ++i) { + if (typeof object.partitions[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartitionList.partitions: object expected"); + message.partitions[i] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.fromObject(object.partitions[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartitionList message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionList} message MetastorePartitionList + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartitionList.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitions = []; + if (message.partitions && message.partitions.length) { + object.partitions = []; + for (var j = 0; j < message.partitions.length; ++j) + object.partitions[j] = $root.google.cloud.bigquery.storage.v1beta.MetastorePartition.toObject(message.partitions[j], options); + } + return object; + }; + + /** + * Converts this MetastorePartitionList to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @instance + * @returns {Object.} JSON object + */ + MetastorePartitionList.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartitionList + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionList + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartitionList.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.MetastorePartitionList"; + }; + + return MetastorePartitionList; + })(); + + v1beta.ReadStream = (function() { + + /** + * Properties of a ReadStream. 
+ * @memberof google.cloud.bigquery.storage.v1beta + * @interface IReadStream + * @property {string|null} [name] ReadStream name + */ + + /** + * Constructs a new ReadStream. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a ReadStream. + * @implements IReadStream + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IReadStream=} [properties] Properties to set + */ + function ReadStream(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ReadStream name. + * @member {string} name + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @instance + */ + ReadStream.prototype.name = ""; + + /** + * Creates a new ReadStream instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1beta.IReadStream=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.ReadStream} ReadStream instance + */ + ReadStream.create = function create(properties) { + return new ReadStream(properties); + }; + + /** + * Encodes the specified ReadStream message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ReadStream.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1beta.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + return writer; + }; + + /** + * Encodes the specified ReadStream message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.ReadStream.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1beta.IReadStream} message ReadStream message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ReadStream.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.ReadStream(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ReadStream message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.ReadStream} ReadStream + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ReadStream.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ReadStream message. + * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ReadStream.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + return null; + }; + + /** + * Creates a ReadStream message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.ReadStream} ReadStream + */ + ReadStream.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.ReadStream) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.ReadStream(); + if (object.name != null) + message.name = String(object.name); + return message; + }; + + /** + * Creates a plain object from a ReadStream message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {google.cloud.bigquery.storage.v1beta.ReadStream} message ReadStream + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ReadStream.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.name = ""; + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + return object; + }; + + /** + * Converts this ReadStream to JSON. 
+ * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @instance + * @returns {Object.} JSON object + */ + ReadStream.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ReadStream + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.ReadStream + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ReadStream.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.ReadStream"; + }; + + return ReadStream; + })(); + + v1beta.StreamList = (function() { + + /** + * Properties of a StreamList. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IStreamList + * @property {Array.|null} [streams] StreamList streams + */ + + /** + * Constructs a new StreamList. + * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a StreamList. + * @implements IStreamList + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IStreamList=} [properties] Properties to set + */ + function StreamList(properties) { + this.streams = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * StreamList streams. + * @member {Array.} streams + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @instance + */ + StreamList.prototype.streams = $util.emptyArray; + + /** + * Creates a new StreamList instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamList=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.StreamList} StreamList instance + */ + StreamList.create = function create(properties) { + return new StreamList(properties); + }; + + /** + * Encodes the specified StreamList message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamList.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamList} message StreamList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamList.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.streams != null && message.streams.length) + for (var i = 0; i < message.streams.length; ++i) + $root.google.cloud.bigquery.storage.v1beta.ReadStream.encode(message.streams[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified StreamList message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.StreamList.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1beta.IStreamList} message StreamList message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + StreamList.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a StreamList message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.StreamList} StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamList.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.StreamList(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.streams && message.streams.length)) + message.streams = []; + message.streams.push($root.google.cloud.bigquery.storage.v1beta.ReadStream.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a StreamList message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.StreamList} StreamList + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + StreamList.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a StreamList message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + StreamList.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.streams != null && message.hasOwnProperty("streams")) { + if (!Array.isArray(message.streams)) + return "streams: array expected"; + for (var i = 0; i < message.streams.length; ++i) { + var error = $root.google.cloud.bigquery.storage.v1beta.ReadStream.verify(message.streams[i]); + if (error) + return "streams." + error; + } + } + return null; + }; + + /** + * Creates a StreamList message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.StreamList} StreamList + */ + StreamList.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.StreamList) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.StreamList(); + if (object.streams) { + if (!Array.isArray(object.streams)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.StreamList.streams: array expected"); + message.streams = []; + for (var i = 0; i < object.streams.length; ++i) { + if (typeof object.streams[i] !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1beta.StreamList.streams: object expected"); + message.streams[i] = $root.google.cloud.bigquery.storage.v1beta.ReadStream.fromObject(object.streams[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a StreamList message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {google.cloud.bigquery.storage.v1beta.StreamList} message StreamList + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + StreamList.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.streams = []; + if (message.streams && message.streams.length) { + object.streams = []; + for (var j = 0; j < message.streams.length; ++j) + object.streams[j] = $root.google.cloud.bigquery.storage.v1beta.ReadStream.toObject(message.streams[j], options); + } + return object; + }; + + /** + * Converts this StreamList to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @instance + * @returns {Object.} JSON object + */ + StreamList.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for StreamList + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.StreamList + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + StreamList.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.StreamList"; + }; + + return StreamList; + })(); + + v1beta.MetastorePartitionValues = (function() { + + /** + * Properties of a MetastorePartitionValues. + * @memberof google.cloud.bigquery.storage.v1beta + * @interface IMetastorePartitionValues + * @property {Array.|null} [values] MetastorePartitionValues values + */ + + /** + * Constructs a new MetastorePartitionValues. 
+ * @memberof google.cloud.bigquery.storage.v1beta + * @classdesc Represents a MetastorePartitionValues. + * @implements IMetastorePartitionValues + * @constructor + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues=} [properties] Properties to set + */ + function MetastorePartitionValues(properties) { + this.values = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MetastorePartitionValues values. + * @member {Array.} values + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @instance + */ + MetastorePartitionValues.prototype.values = $util.emptyArray; + + /** + * Creates a new MetastorePartitionValues instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues=} [properties] Properties to set + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionValues} MetastorePartitionValues instance + */ + MetastorePartitionValues.create = function create(properties) { + return new MetastorePartitionValues(properties); + }; + + /** + * Encodes the specified MetastorePartitionValues message. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues} message MetastorePartitionValues message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionValues.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.values != null && message.values.length) + for (var i = 0; i < message.values.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.values[i]); + return writer; + }; + + /** + * Encodes the specified MetastorePartitionValues message, length delimited. Does not implicitly {@link google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1beta.IMetastorePartitionValues} message MetastorePartitionValues message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MetastorePartitionValues.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer. 
+ * @function decode + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionValues} MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionValues.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.values && message.values.length)) + message.values = []; + message.values.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MetastorePartitionValues message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionValues} MetastorePartitionValues + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MetastorePartitionValues.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MetastorePartitionValues message. 
+ * @function verify + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MetastorePartitionValues.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.values != null && message.hasOwnProperty("values")) { + if (!Array.isArray(message.values)) + return "values: array expected"; + for (var i = 0; i < message.values.length; ++i) + if (!$util.isString(message.values[i])) + return "values: string[] expected"; + } + return null; + }; + + /** + * Creates a MetastorePartitionValues message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.storage.v1beta.MetastorePartitionValues} MetastorePartitionValues + */ + MetastorePartitionValues.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues) + return object; + var message = new $root.google.cloud.bigquery.storage.v1beta.MetastorePartitionValues(); + if (object.values) { + if (!Array.isArray(object.values)) + throw TypeError(".google.cloud.bigquery.storage.v1beta.MetastorePartitionValues.values: array expected"); + message.values = []; + for (var i = 0; i < object.values.length; ++i) + message.values[i] = String(object.values[i]); + } + return message; + }; + + /** + * Creates a plain object from a MetastorePartitionValues message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {google.cloud.bigquery.storage.v1beta.MetastorePartitionValues} message MetastorePartitionValues + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MetastorePartitionValues.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.values = []; + if (message.values && message.values.length) { + object.values = []; + for (var j = 0; j < message.values.length; ++j) + object.values[j] = message.values[j]; + } + return object; + }; + + /** + * Converts this MetastorePartitionValues to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @instance + * @returns {Object.} JSON object + */ + MetastorePartitionValues.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MetastorePartitionValues + * @function getTypeUrl + * @memberof google.cloud.bigquery.storage.v1beta.MetastorePartitionValues + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MetastorePartitionValues.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.storage.v1beta.MetastorePartitionValues"; + }; + + return MetastorePartitionValues; + })(); + + return v1beta; + })(); + storage.v1beta1 = (function() { /** diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 18f0616d7a3..5568792a1ba 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ 
b/handwritten/bigquery-storage/protos/protos.json @@ -1213,13 +1213,13 @@ "requestType": "BatchCreateMetastorePartitionsRequest", "responseType": "BatchCreateMetastorePartitionsResponse", "options": { - "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate", + "(google.api.http).post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate", "(google.api.http).body": "*" }, "parsedOptions": [ { "(google.api.http)": { - "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchCreate", + "post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate", "body": "*" } } @@ -1229,13 +1229,13 @@ "requestType": "BatchDeleteMetastorePartitionsRequest", "responseType": "google.protobuf.Empty", "options": { - "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete", + "(google.api.http).post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete", "(google.api.http).body": "*" }, "parsedOptions": [ { "(google.api.http)": { - "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchDelete", + "post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete", "body": "*" } } @@ -1245,13 +1245,13 @@ "requestType": "BatchUpdateMetastorePartitionsRequest", "responseType": "BatchUpdateMetastorePartitionsResponse", "options": { - "(google.api.http).post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate", + "(google.api.http).post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate", "(google.api.http).body": "*" }, "parsedOptions": [ { "(google.api.http)": { - "post": "/v1alpha/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:batchUpdate", + "post": "/v1alpha/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate", "body": "*" } } @@ -1326,6 +1326,521 @@ "options": 
{ "(google.api.field_behavior)": "OPTIONAL" } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "BatchCreateMetastorePartitionsResponse": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1 + } + } + }, + "BatchDeleteMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "partitionValues": { + "rule": "repeated", + "type": "MetastorePartitionValues", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "UpdateMetastorePartitionRequest": { + "fields": { + "metastorePartition": { + "type": "MetastorePartition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "updateMask": { + "type": "google.protobuf.FieldMask", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "BatchUpdateMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "requests": { + "rule": "repeated", + "type": "UpdateMetastorePartitionRequest", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "BatchUpdateMetastorePartitionsResponse": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1 + } + } + }, + "ListMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + 
"(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "filter": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "traceId": { + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "ListMetastorePartitionsResponse": { + "oneofs": { + "response": { + "oneof": [ + "partitions", + "streams" + ] + } + }, + "fields": { + "partitions": { + "type": "MetastorePartitionList", + "id": 1 + }, + "streams": { + "type": "StreamList", + "id": 2 + } + } + }, + "StreamMetastorePartitionsRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "metastorePartitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "skipExistingPartitions": { + "type": "bool", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "StreamMetastorePartitionsResponse": { + "fields": { + "totalPartitionsStreamedCount": { + "type": "int64", + "id": 2 + }, + "totalPartitionsInsertedCount": { + "type": "int64", + "id": 3 + } + } + }, + "BatchSizeTooLargeError": { + "fields": { + "maxBatchSize": { + "type": "int64", + "id": 1 + }, + "errorMessage": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "FieldSchema": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "type": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "StorageDescriptor": { + "fields": { + "locationUri": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": 
"OPTIONAL" + } + }, + "inputFormat": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "outputFormat": { + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "serdeInfo": { + "type": "SerDeInfo", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "SerDeInfo": { + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "serializationLibrary": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "parameters": { + "keyType": "string", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "MetastorePartition": { + "fields": { + "values": { + "rule": "repeated", + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "createTime": { + "type": "google.protobuf.Timestamp", + "id": 2, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + }, + "storageDescriptor": { + "type": "StorageDescriptor", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "parameters": { + "keyType": "string", + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "fields": { + "rule": "repeated", + "type": "FieldSchema", + "id": 5, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, + "MetastorePartitionList": { + "fields": { + "partitions": { + "rule": "repeated", + "type": "MetastorePartition", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "ReadStream": { + "options": { + "(google.api.resource).type": "bigquerystorage.googleapis.com/ReadStream", + "(google.api.resource).pattern": "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}", + "(google.api.resource).plural": 
"readStreams", + "(google.api.resource).singular": "readStream" + }, + "fields": { + "name": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "IDENTIFIER" + } + } + } + }, + "StreamList": { + "fields": { + "streams": { + "rule": "repeated", + "type": "ReadStream", + "id": 1, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } + } + } + }, + "MetastorePartitionValues": { + "fields": { + "values": { + "rule": "repeated", + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + } + } + }, + "v1beta": { + "options": { + "csharp_namespace": "Google.Cloud.BigQuery.Storage.V1Beta", + "go_package": "cloud.google.com/go/bigquery/storage/apiv1beta/storagepb;storagepb", + "java_multiple_files": true, + "java_outer_classname": "MetastorePartitionProto", + "java_package": "com.google.cloud.bigquery.storage.v1beta", + "php_namespace": "Google\\Cloud\\BigQuery\\Storage\\V1beta", + "(google.api.resource_definition).type": "bigquery.googleapis.com/Table", + "(google.api.resource_definition).pattern": "projects/{project}/datasets/{dataset}/tables/{table}" + }, + "nested": { + "MetastorePartitionService": { + "options": { + "(google.api.default_host)": "bigquerystorage.googleapis.com", + "(google.api.oauth_scopes)": "https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + }, + "methods": { + "BatchCreateMetastorePartitions": { + "requestType": "BatchCreateMetastorePartitionsRequest", + "responseType": "BatchCreateMetastorePartitionsResponse", + "options": { + "(google.api.http).post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchCreate", + "body": "*" + } + } + ] + }, + "BatchDeleteMetastorePartitions": { + "requestType": "BatchDeleteMetastorePartitionsRequest", + 
"responseType": "google.protobuf.Empty", + "options": { + "(google.api.http).post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchDelete", + "body": "*" + } + } + ] + }, + "BatchUpdateMetastorePartitions": { + "requestType": "BatchUpdateMetastorePartitionsRequest", + "responseType": "BatchUpdateMetastorePartitionsResponse", + "options": { + "(google.api.http).post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate", + "(google.api.http).body": "*" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "post": "/v1beta/{parent=projects/*/datasets/*/tables/*}/partitions:batchUpdate", + "body": "*" + } + } + ] + }, + "ListMetastorePartitions": { + "requestType": "ListMetastorePartitionsRequest", + "responseType": "ListMetastorePartitionsResponse", + "options": { + "(google.api.http).get": "/v1beta/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list", + "(google.api.method_signature)": "parent" + }, + "parsedOptions": [ + { + "(google.api.http)": { + "get": "/v1beta/{parent=projects/*/locations/*/datasets/*/tables/*}/partitions:list" + } + }, + { + "(google.api.method_signature)": "parent" + } + ] + }, + "StreamMetastorePartitions": { + "requestType": "StreamMetastorePartitionsRequest", + "requestStream": true, + "responseType": "StreamMetastorePartitionsResponse", + "responseStream": true + } + } + }, + "CreateMetastorePartitionRequest": { + "fields": { + "parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "metastorePartition": { + "type": "MetastorePartition", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + } + } + }, + "BatchCreateMetastorePartitionsRequest": { + "fields": { + 
"parent": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "bigquery.googleapis.com/Table" + } + }, + "requests": { + "rule": "repeated", + "type": "CreateMetastorePartitionRequest", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "skipExistingPartitions": { + "type": "bool", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, @@ -1355,6 +1870,13 @@ "options": { "(google.api.field_behavior)": "REQUIRED" } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, @@ -1393,6 +1915,13 @@ "options": { "(google.api.field_behavior)": "REQUIRED" } + }, + "traceId": { + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, @@ -1421,6 +1950,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "traceId": { + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js index 51e8727307d..f9552ee4a0c 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js @@ -43,9 +43,17 @@ function main(parent, requests) { * add_partitions(..). If the flag is set to false, the server will return * ALREADY_EXISTS if any partition already exists. 
If the flag is set to true, * the server will skip existing partitions and insert only the non-existing - * partitions. + * partitions. A maximum of 900 partitions can be inserted in a batch. */ // const skipExistingPartitions = true + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. + */ + // const traceId = 'abc123' // Imports the Storage library const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js index 1de77bffda5..10d3f93907a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js @@ -36,9 +36,17 @@ function main(parent, partitionValues) { // const parent = 'abc123' /** * Required. The list of metastore partitions (identified by its values) to be - * deleted. A maximum of 100 partitions can be deleted in a batch. + * deleted. A maximum of 900 partitions can be deleted in a batch. */ // const partitionValues = [1,2,3,4] + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. 
+ */ + // const traceId = 'abc123' // Imports the Storage library const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js index ebe7964749f..0d662372fb4 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js @@ -38,6 +38,14 @@ function main(parent, requests) { * Required. Requests to update metastore partitions in the table. */ // const requests = [1,2,3,4] + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. + */ + // const traceId = 'abc123' // Imports the Storage library const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js index 6ec4c6effe7..7b4faf856fd 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js @@ -46,6 +46,14 @@ function main(parent) { * Restricted to a maximum length for 1 MB. */ // const filter = 'abc123' + /** + * Optional. 
Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. + */ + // const traceId = 'abc123' // Imports the Storage library const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1alpha; diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index e2f4a8a9991..5a822918abf 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 68, + "end": 76, "type": "FULL" } ], @@ -42,6 +42,10 @@ { "name": "skip_existing_partitions", "type": "TYPE_BOOL" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" } ], "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsResponse", @@ -70,7 +74,7 @@ "segments": [ { "start": 25, - "end": 61, + "end": 69, "type": "FULL" } ], @@ -86,6 +90,10 @@ { "name": "partition_values", "type": "TYPE_MESSAGE[]" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" } ], "resultType": ".google.protobuf.Empty", @@ -114,7 +122,7 @@ "segments": [ { "start": 25, - "end": 60, + "end": 68, "type": "FULL" } ], @@ -130,6 +138,10 @@ { "name": "requests", "type": "TYPE_MESSAGE[]" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" } ], "resultType": ".google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsResponse", @@ -158,7 +170,7 @@ "segments": [ { "start": 25, - "end": 67, + "end": 75, 
"type": "FULL" } ], @@ -174,6 +186,10 @@ { "name": "filter", "type": "TYPE_STRING" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" } ], "resultType": ".google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsResponse", diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js new file mode 100644 index 00000000000..ce5d32f2515 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js @@ -0,0 +1,84 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, requests) { + // [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. 
Reference to the table to where the metastore partitions to be + * added, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. Requests to add metastore partitions to the table. + */ + // const requests = [1,2,3,4] + /** + * Optional. Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS if any partition already exists. If the flag is set to true, + * the server will skip existing partitions and insert only the non-existing + * partitions. A maximum of 900 partitions can be inserted in a batch. + */ + // const skipExistingPartitions = true + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. 
+ */ + // const traceId = 'abc123' + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1beta; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchCreateMetastorePartitions() { + // Construct request + const request = { + parent, + requests, + }; + + // Run request + const response = await storageClient.batchCreateMetastorePartitions(request); + console.log(response); + } + + callBatchCreateMetastorePartitions(); + // [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js new file mode 100644 index 00000000000..f289e265b93 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js @@ -0,0 +1,77 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, partitionValues) { + // [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. The list of metastore partitions (identified by its values) to be + * deleted. A maximum of 900 partitions can be deleted in a batch. + */ + // const partitionValues = [1,2,3,4] + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. 
+ */ + // const traceId = 'abc123' + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1beta; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchDeleteMetastorePartitions() { + // Construct request + const request = { + parent, + partitionValues, + }; + + // Run request + const response = await storageClient.batchDeleteMetastorePartitions(request); + console.log(response); + } + + callBatchDeleteMetastorePartitions(); + // [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js new file mode 100644 index 00000000000..4ef034bffbf --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js @@ -0,0 +1,76 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, requests) { + // [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Required. Requests to update metastore partitions in the table. + */ + // const requests = [1,2,3,4] + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. 
+ */ + // const traceId = 'abc123' + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1beta; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callBatchUpdateMetastorePartitions() { + // Construct request + const request = { + parent, + requests, + }; + + // Run request + const response = await storageClient.batchUpdateMetastorePartitions(request); + console.log(response); + } + + callBatchUpdateMetastorePartitions(); + // [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js new file mode 100644 index 00000000000..188046ccc07 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js @@ -0,0 +1,84 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Optional. SQL text filtering statement, similar to a WHERE clause in a + * query. Only supports single-row expressions. Aggregate functions are not + * supported. + * Examples: + * * "int_field > 5" + * * "date_field = CAST('2014-9-27' as DATE)" + * * "nullable_field is not NULL" + * * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + * * "numeric_field BETWEEN 1.0 AND 5.0" + * Restricted to a maximum length of 1 MB. + */ + // const filter = 'abc123' + /** + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. 
+ */ + // const traceId = 'abc123' + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1beta; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callListMetastorePartitions() { + // Construct request + const request = { + parent, + }; + + // Run request + const response = await storageClient.listMetastorePartitions(request); + console.log(response); + } + + callListMetastorePartitions(); + // [END bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js new file mode 100644 index 00000000000..a773fb97054 --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js @@ -0,0 +1,82 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(parent) { + // [START bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. Reference to the table to where the partition to be added, in the + * format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + */ + // const parent = 'abc123' + /** + * Optional. A list of metastore partitions to be added to the table. + */ + // const metastorePartitions = [1,2,3,4] + /** + * Optional. Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS on commit if any partition already exists. If the flag is + * set to true: + * 1) the server will skip existing partitions + * insert only the non-existing partitions as part of the commit. + * 2) The client must set the `skip_existing_partitions` field to true for + * all requests in the stream. 
+ */ + // const skipExistingPartitions = true + + // Imports the Storage library + const {MetastorePartitionServiceClient} = require('@google-cloud/storage').v1beta; + + // Instantiates a client + const storageClient = new MetastorePartitionServiceClient(); + + async function callStreamMetastorePartitions() { + // Construct request + const request = { + parent, + }; + + // Run request + const stream = await storageClient.streamMetastorePartitions(); + stream.on('data', (response) => { console.log(response) }); + stream.on('error', (err) => { throw(err) }); + stream.on('end', () => { /* API call completed */ }); + stream.write(request); + stream.end(); + } + + callStreamMetastorePartitions(); + // [END bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json new file mode 100644 index 00000000000..1eb43f38cec --- /dev/null +++ b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json @@ -0,0 +1,259 @@ +{ + "clientLibrary": { + "name": "nodejs-storage", + "version": "5.0.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta", + "version": "v1beta" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", + "title": "MetastorePartitionService batchCreateMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Adds metastore partitions to a table.", + "canonical": true, + "file": "metastore_partition_service.batch_create_metastore_partitions.js", + 
"language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 76, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchCreateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "requests", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "skip_existing_partitions", + "type": "TYPE_BOOL" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchCreateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", + "title": "MetastorePartitionService batchDeleteMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Deletes metastore partitions from a table.", + "canonical": true, + "file": "metastore_partition_service.batch_delete_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchDeleteMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "partition_values", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "trace_id", + "type": 
"TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchDeleteMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", + "title": "MetastorePartitionService batchUpdateMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Updates metastore partitions in a table.", + "canonical": true, + "file": "metastore_partition_service.batch_update_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 68, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "BatchUpdateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "requests", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "BatchUpdateMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService" + } + } + } + 
}, + { + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async", + "title": "MetastorePartitionService listMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " Gets metastore partitions from a table.", + "canonical": true, + "file": "metastore_partition_service.list_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 76, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + }, + { + "name": "trace_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "ListMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService" + } + } + } + }, + { + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async", + "title": "MetastorePartitionService streamMetastorePartitions Sample", + "origin": "API_DEFINITION", + "description": " This is a bi-di streaming rpc method that allows the client to send a stream of partitions and commit all of them atomically at the end. If the commit is successful, the server will return a response and close the stream. If the commit fails (due to duplicate partitions or other reason), the server will close the stream with an error. 
This method is only available via the gRPC API (not REST).", + "canonical": true, + "file": "metastore_partition_service.stream_metastore_partitions.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 74, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StreamMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "metastore_partitions", + "type": "TYPE_MESSAGE[]" + }, + { + "name": "skip_existing_partitions", + "type": "TYPE_BOOL" + } + ], + "resultType": ".google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse", + "client": { + "shortName": "MetastorePartitionServiceClient", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionServiceClient" + }, + "method": { + "shortName": "StreamMetastorePartitions", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "shortName": "MetastorePartitionService", + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService" + } + } + } + } + ] +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/src/index.ts b/handwritten/bigquery-storage/src/index.ts index c095dfe9d57..154ed4d9bc7 100644 --- a/handwritten/bigquery-storage/src/index.ts +++ b/handwritten/bigquery-storage/src/index.ts @@ -19,6 +19,7 @@ import * as v1 from './v1'; import * as v1beta1 from './v1beta1'; import * as v1alpha from './v1alpha'; +import * as v1beta from './v1beta'; import * as managedwriter from './managedwriter'; import * as reader from './reader'; const BigQueryReadClient = v1.BigQueryReadClient; @@ -36,6 +37,7 @@ export { BigQueryReadClient, v1beta1, v1alpha, + v1beta, BigQueryStorageClient, BigQueryWriteClient, managedwriter, diff --git 
a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts index 80a367b70cc..67ba5f9d1eb 100644 --- a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts @@ -39,8 +39,8 @@ const version = require('../../../package.json').version; /** * BigQuery Metastore Partition Service API. - * This service is used for managing metastore partitions in BigQuery metastore. - * The service supports only batch operations for write. + * This service is used for managing metastore partitions in BigQuery + * metastore. The service supports only batch operations for write. * @class * @memberof v1alpha */ @@ -422,7 +422,13 @@ export class MetastorePartitionServiceClient { * add_partitions(..). If the flag is set to false, the server will return * ALREADY_EXISTS if any partition already exists. If the flag is set to true, * the server will skip existing partitions and insert only the non-existing - * partitions. + * partitions. A maximum of 900 partitions can be inserted in a batch. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. @@ -560,7 +566,13 @@ export class MetastorePartitionServiceClient { * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. 
* @param {number[]} request.partitionValues * Required. The list of metastore partitions (identified by its values) to be - * deleted. A maximum of 100 partitions can be deleted in a batch. + * deleted. A maximum of 900 partitions can be deleted in a batch. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. @@ -698,6 +710,12 @@ export class MetastorePartitionServiceClient { * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. * @param {number[]} request.requests * Required. Requests to update metastore partitions in the table. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. @@ -844,6 +862,12 @@ export class MetastorePartitionServiceClient { * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" * "numeric_field BETWEEN 1.0 AND 5.0" * Restricted to a maximum length for 1 MB. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. 
It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. diff --git a/handwritten/bigquery-storage/src/v1beta/gapic_metadata.json b/handwritten/bigquery-storage/src/v1beta/gapic_metadata.json new file mode 100644 index 00000000000..3d1addb8607 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta/gapic_metadata.json @@ -0,0 +1,68 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.storage.v1beta", + "libraryPackage": "@google-cloud/storage", + "services": { + "MetastorePartitionService": { + "clients": { + "grpc": { + "libraryClient": "MetastorePartitionServiceClient", + "rpcs": { + "BatchCreateMetastorePartitions": { + "methods": [ + "batchCreateMetastorePartitions" + ] + }, + "BatchDeleteMetastorePartitions": { + "methods": [ + "batchDeleteMetastorePartitions" + ] + }, + "BatchUpdateMetastorePartitions": { + "methods": [ + "batchUpdateMetastorePartitions" + ] + }, + "ListMetastorePartitions": { + "methods": [ + "listMetastorePartitions" + ] + }, + "StreamMetastorePartitions": { + "methods": [ + "streamMetastorePartitions" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MetastorePartitionServiceClient", + "rpcs": { + "BatchCreateMetastorePartitions": { + "methods": [ + "batchCreateMetastorePartitions" + ] + }, + "BatchDeleteMetastorePartitions": { + "methods": [ + "batchDeleteMetastorePartitions" + ] + }, + "BatchUpdateMetastorePartitions": { + "methods": [ + 
"batchUpdateMetastorePartitions" + ] + }, + "ListMetastorePartitions": { + "methods": [ + "listMetastorePartitions" + ] + } + } + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta/index.ts b/handwritten/bigquery-storage/src/v1beta/index.ts new file mode 100644 index 00000000000..c934f7b7787 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta/index.ts @@ -0,0 +1,19 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MetastorePartitionServiceClient} from './metastore_partition_service_client'; diff --git a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts new file mode 100644 index 00000000000..93b471cf123 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts @@ -0,0 +1,1161 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type { + Callback, + CallOptions, + Descriptors, + ClientOptions, +} from 'google-gax'; +import {PassThrough} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; + +/** + * Client JSON configuration object, loaded from + * `src/v1beta/metastore_partition_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './metastore_partition_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * BigQuery Metastore Partition Service API. + * This service is used for managing metastore partitions in BigQuery + * metastore. The service supports only batch operations for write. 
+ * @class + * @memberof v1beta + */ +export class MetastorePartitionServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + private _universeDomain: string; + private _servicePath: string; + private _log = logging.log('storage'); + + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + metastorePartitionServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MetastorePartitionServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 
'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP/1.1 REST mode. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MetastorePartitionServiceClient({fallback: true}, gax); + * ``` + */ + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback, + ) { + // Ensure that options include all the required fields. + const staticMembers = this + .constructor as typeof MetastorePartitionServiceClient; + if ( + opts?.universe_domain && + opts?.universeDomain && + opts?.universe_domain !== opts?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.', + ); + } + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + this._universeDomain = + opts?.universeDomain ?? + opts?.universe_domain ?? + universeDomainEnvVar ?? 
+ 'googleapis.com'; + this._servicePath = 'bigquerystorage.' + this._universeDomain; + const servicePath = + opts?.servicePath || opts?.apiEndpoint || this._servicePath; + this._providedCustomServicePath = !!( + opts?.servicePath || opts?.apiEndpoint + ); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = + opts?.fallback ?? + (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // Request numeric enum values if REST transport is used. + opts.numericEnums = true; + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== this._servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = this._servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === this._servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; + if (typeof process === 'object' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + readStreamPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/sessions/{session}/streams/{stream}', + ), + tablePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/datasets/{dataset}/tables/{table}', + ), + }; + + // Some of the methods on this service provide streaming responses. + // Provide descriptors for these. + this.descriptors.stream = { + streamMetastorePartitions: new this._gaxModule.StreamDescriptor( + this._gaxModule.StreamType.BIDI_STREAMING, + !!opts.fallback, + !!opts.gaxServerStreamingRetries, + ), + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.storage.v1beta.MetastorePartitionService', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')}, + ); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. 
+ this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.metastorePartitionServiceStub) { + return this.metastorePartitionServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.storage.v1beta.MetastorePartitionService. + this.metastorePartitionServiceStub = this._gaxGrpc.createStub( + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.storage.v1beta.MetastorePartitionService', + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.storage.v1beta + .MetastorePartitionService, + this._opts, + this._providedCustomServicePath, + ) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const metastorePartitionServiceStubMethods = [ + 'batchCreateMetastorePartitions', + 'batchDeleteMetastorePartitions', + 'batchUpdateMetastorePartitions', + 'listMetastorePartitions', + 'streamMetastorePartitions', + ]; + for (const methodName of metastorePartitionServiceStubMethods) { + const callPromise = this.metastorePartitionServiceStub.then( + stub => + (...args: Array<{}>) => { + if (this._terminated) { + if (methodName in this.descriptors.stream) { + const stream = new PassThrough({objectMode: true}); + setImmediate(() => { + stream.emit( + 'error', + new this._gaxModule.GoogleError( + 'The client has already been closed.', + ), + ); + }); + return stream; + } + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error | null | undefined) => () => { + throw err; + }, + ); + + const descriptor = this.descriptors.stream[methodName] || undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback, + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.metastorePartitionServiceStub; + } + + /** + * The DNS address for this API service. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static servicePath is deprecated, please use the instance method instead.', + 'DeprecationWarning', + ); + } + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath. + * @deprecated Use the apiEndpoint method of the client instance. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + process.emitWarning( + 'Static apiEndpoint is deprecated, please use the instance method instead.', + 'DeprecationWarning', + ); + } + return 'bigquerystorage.googleapis.com'; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + get apiEndpoint() { + return this._servicePath; + } + + get universeDomain() { + return this._universeDomain; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId( + callback?: Callback, + ): Promise | void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- + /** + * Adds metastore partitions to a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to where the metastore partitions to be + * added, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.requests + * Required. Requests to add metastore partitions to the table. + * @param {boolean} [request.skipExistingPartitions] + * Optional. 
Mimics the ifNotExists flag in IMetaStoreClient + * add_partitions(..). If the flag is set to false, the server will return + * ALREADY_EXISTS if any partition already exists. If the flag is set to true, + * the server will skip existing partitions and insert only the non-existing + * partitions. A maximum of 900 partitions can be inserted in a batch. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse|BatchCreateMetastorePartitionsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js + * region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async + */ + batchCreateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, + options?: CallOptions, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchCreateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchCreateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchCreateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + | 
protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchCreateMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info( + 'batchCreateMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .batchCreateMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchCreateMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); + } + /** + * Deletes metastore partitions from a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.partitionValues + * Required. The list of metastore partitions (identified by its values) to be + * deleted. A maximum of 900 partitions can be deleted in a batch. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.protobuf.Empty|Empty}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js + * region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async + */ + batchDeleteMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, + options?: CallOptions, + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchDeleteMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchDeleteMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchDeleteMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + 
> | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchDeleteMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info( + 'batchDeleteMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchDeleteMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchDeleteMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchDeleteMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); + } + /** + * Updates metastore partitions in a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {number[]} request.requests + * Required. 
Requests to update metastore partitions in the table. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. This is expected, but not required, to be + * globally unique. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse|BatchUpdateMetastorePartitionsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js + * region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async + */ + batchUpdateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, + options?: CallOptions, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + batchUpdateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchUpdateMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + batchUpdateMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + | 
protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchUpdateMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info( + 'batchUpdateMetastorePartitions response %j', + response, + ); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .batchUpdateMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info( + 'batchUpdateMetastorePartitions response %j', + response, + ); + return [response, options, rawResponse]; + }, + ); + } + /** + * Gets metastore partitions from a table. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Reference to the table to which these metastore partitions + * belong, in the format of + * projects/{project}/locations/{location}/datasets/{dataset}/tables/{table}. + * @param {string} [request.filter] + * Optional. SQL text filtering statement, similar to a WHERE clause in a + * query. Only supports single-row expressions. Aggregate functions are not + * supported. + * + * Examples: + * * "int_field > 5" + * * "date_field = CAST('2014-9-27' as DATE)" + * * "nullable_field is not NULL" + * * "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + * * "numeric_field BETWEEN 1.0 AND 5.0" + * + * Restricted to a maximum length of 1 MB. + * @param {string} [request.traceId] + * Optional. Optional trace id to be used for debugging. It is expected that + * the client sets the same `trace_id` for all the batches in the same + * operation, so that it is possible to tie together the logs to all the + * batches in the same operation. Limited to 256 characters. This is expected, + * but not required, to be globally unique. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing {@link protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse|ListMetastorePartitionsResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. + * @example include:samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js + * region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async + */ + listMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, + options?: CallOptions, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + >; + listMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + listMetastorePartitions( + request: protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, + callback: Callback< + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + listMetastorePartitions( + request?: protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + | 
protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + >, + ): Promise< + [ + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + parent: request.parent ?? '', + }); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listMetastorePartitions request %j', request); + const wrappedCallback: + | Callback< + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('listMetastorePartitions response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .listMetastorePartitions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse, + ( + | protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('listMetastorePartitions response %j', response); + return [response, options, rawResponse]; + }, + ); + } + + /** + * This is a bi-di streaming rpc method that allows the client to send + * a stream of partitions and commit all of them atomically at the end. + * If the commit is successful, the server will return a + * response and close the stream. If the commit fails (due to duplicate + * partitions or other reason), the server will close the stream with an + * error. This method is only available via the gRPC API (not REST). + * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which is both readable and writable. It accepts objects + * representing {@link protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest|StreamMetastorePartitionsRequest} for write() method, and + * will emit objects representing {@link protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse|StreamMetastorePartitionsResponse} on 'data' event asynchronously. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#bi-directional-streaming | documentation } + * for more details and examples. 
+ * @example include:samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js + * region_tag:bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async + */ + streamMetastorePartitions(options?: CallOptions): gax.CancellableStream { + this.initialize().catch(err => { + throw err; + }); + this._log.info('streamMetastorePartitions stream %j', options); + return this.innerApiCalls.streamMetastorePartitions(null, options); + } + + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified readStream resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} session + * @param {string} stream + * @returns {string} Resource name string. + */ + readStreamPath( + project: string, + location: string, + session: string, + stream: string, + ) { + return this.pathTemplates.readStreamPathTemplate.render({ + project: project, + location: location, + session: session, + stream: stream, + }); + } + + /** + * Parse the project from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the project. + */ + matchProjectFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .project; + } + + /** + * Parse the location from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the location. + */ + matchLocationFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .location; + } + + /** + * Parse the session from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the session. 
+ */ + matchSessionFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .session; + } + + /** + * Parse the stream from ReadStream resource. + * + * @param {string} readStreamName + * A fully-qualified path representing ReadStream resource. + * @returns {string} A string representing the stream. + */ + matchStreamFromReadStreamName(readStreamName: string) { + return this.pathTemplates.readStreamPathTemplate.match(readStreamName) + .stream; + } + + /** + * Return a fully-qualified table resource name string. + * + * @param {string} project + * @param {string} dataset + * @param {string} table + * @returns {string} Resource name string. + */ + tablePath(project: string, dataset: string, table: string) { + return this.pathTemplates.tablePathTemplate.render({ + project: project, + dataset: dataset, + table: table, + }); + } + + /** + * Parse the project from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the project. + */ + matchProjectFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).project; + } + + /** + * Parse the dataset from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the dataset. + */ + matchDatasetFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).dataset; + } + + /** + * Parse the table from Table resource. + * + * @param {string} tableName + * A fully-qualified path representing Table resource. + * @returns {string} A string representing the table. + */ + matchTableFromTableName(tableName: string) { + return this.pathTemplates.tablePathTemplate.match(tableName).table; + } + + /** + * Terminate the gRPC channel and close the client. 
+ * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.metastorePartitionServiceStub && !this._terminated) { + return this.metastorePartitionServiceStub.then(stub => { + this._log.info('ending gRPC channel'); + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client_config.json b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client_config.json new file mode 100644 index 00000000000..a0ecd3a69e1 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client_config.json @@ -0,0 +1,54 @@ +{ + "interfaces": { + "google.cloud.bigquery.storage.v1beta.MetastorePartitionService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "BatchCreateMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchDeleteMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "BatchUpdateMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "ListMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "StreamMetastorePartitions": { + "timeout_millis": 240000, + "retry_codes_name": "non_idempotent", 
+ "retry_params_name": "default" + } + } + } + } +} diff --git a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_proto_list.json b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_proto_list.json new file mode 100644 index 00000000000..7899307d859 --- /dev/null +++ b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_proto_list.json @@ -0,0 +1,4 @@ +[ + "../../protos/google/cloud/bigquery/storage/v1beta/metastore_partition.proto", + "../../protos/google/cloud/bigquery/storage/v1beta/partition.proto" +] diff --git a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts index 3f3d79d9c3f..0672ab7ec6f 100644 --- a/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts +++ b/handwritten/bigquery-storage/test/gapic_big_query_read_v1.ts @@ -206,9 +206,7 @@ describe('v1.BigQueryReadClient', () => { projectId: 'bogus', }); assert.strictEqual(client.bigQueryReadStub, undefined); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); assert(client.bigQueryReadStub); }); @@ -288,9 +286,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); @@ -322,9 +318,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); @@ -372,9 +366,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await 
client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); @@ -406,9 +398,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.CreateReadSessionRequest(), ); @@ -432,9 +422,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); @@ -465,9 +453,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); @@ -514,9 +500,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); @@ -547,9 +531,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.SplitReadStreamRequest(), ); @@ -572,9 +554,7 @@ describe('v1.BigQueryReadClient', 
() => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); @@ -620,9 +600,7 @@ describe('v1.BigQueryReadClient', () => { projectId: 'bogus', gaxServerStreamingRetries: true, }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); @@ -667,9 +645,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); @@ -714,9 +690,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1.ReadRowsRequest(), ); @@ -765,9 +739,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.projectPathTemplate.render = sinon .stub() .returns(fakePath); @@ -807,9 +779,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.readSessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -874,9 +844,7 @@ describe('v1.BigQueryReadClient', () => { 
credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.readStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -951,9 +919,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.tablePathTemplate.render = sinon .stub() .returns(fakePath); @@ -1018,9 +984,7 @@ describe('v1.BigQueryReadClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.writeStreamPathTemplate.render = sinon .stub() .returns(fakePath); diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts index cf8ad078d22..5746d5e08b6 100644 --- a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts +++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1alpha.ts @@ -215,9 +215,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { }, ); assert.strictEqual(client.metastorePartitionServiceStub, undefined); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); assert(client.metastorePartitionServiceStub); }); @@ -312,9 +310,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); @@ -349,9 +345,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { 
projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); @@ -401,9 +395,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); @@ -440,9 +432,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchCreateMetastorePartitionsRequest(), ); @@ -471,9 +461,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); @@ -508,9 +496,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); @@ -560,9 +546,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); @@ -599,9 +583,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); 
- await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchDeleteMetastorePartitionsRequest(), ); @@ -630,9 +612,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); @@ -667,9 +647,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); @@ -719,9 +697,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); @@ -758,9 +734,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.BatchUpdateMetastorePartitionsRequest(), ); @@ -789,9 +763,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); @@ -826,9 +798,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await 
client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); @@ -878,9 +848,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); @@ -917,9 +885,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.ListMetastorePartitionsRequest(), ); @@ -948,9 +914,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(), ); @@ -998,9 +962,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); const request = generateSampleMessage( new protos.google.cloud.bigquery.storage.v1alpha.StreamMetastorePartitionsRequest(), ); @@ -1055,9 +1017,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); client.pathTemplates.readStreamPathTemplate.render = sinon .stub() .returns(fakePath); @@ -1135,9 +1095,7 @@ describe('v1alpha.MetastorePartitionServiceClient', () => { projectId: 'bogus', }, ); - await client.initialize().catch(err => { - throw err; - }); + await client.initialize(); 
client.pathTemplates.tablePathTemplate.render = sinon .stub() .returns(fakePath); diff --git a/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts new file mode 100644 index 00000000000..da3e55a9c51 --- /dev/null +++ b/handwritten/bigquery-storage/test/gapic_metastore_partition_service_v1beta.ts @@ -0,0 +1,1151 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as metastorepartitionserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +// Dynamically loaded proto JSON is needed to get the type information +// to fill in default values for request objects +const root = protobuf.Root.fromJSON( + require('../protos/protos.json'), +).resolveAll(); + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function getTypeDefaultValue(typeName: string, fields: string[]) { + let type = root.lookupType(typeName) as protobuf.Type; + for (const field of fields.slice(0, -1)) { + type = type.fields[field]?.resolvedType as protobuf.Type; + } + return type.fields[fields[fields.length - 1]]?.defaultValue; +} + +function generateSampleMessage(instance: T) { + const filledObject = ( + instance.constructor as typeof protobuf.Message + ).toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject, + ) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error + ? sinon.stub().rejects(error) + : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error, +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); +} + +function stubBidiStreamingCall( + response?: ResponseType, + error?: Error, +) { + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + return sinon.stub().returns(mockStream); +} + +describe('v1beta.MetastorePartitionServiceClient', () => { + describe('Common methods', () => { + it('has apiEndpoint', () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(); + const apiEndpoint = client.apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + }); + + it('has universeDomain', () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(); + const universeDomain = client.universeDomain; + assert.strictEqual(universeDomain, 'googleapis.com'); + }); + + if ( + typeof process === 'object' && + typeof process.emitWarning === 'function' + ) { + it('throws DeprecationWarning if static servicePath is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const servicePath = + metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient + .servicePath; + assert.strictEqual(servicePath, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + + it('throws DeprecationWarning if static apiEndpoint is used', () => { + const stub = sinon.stub(process, 'emitWarning'); + const apiEndpoint = + metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient + .apiEndpoint; + assert.strictEqual(apiEndpoint, 'bigquerystorage.googleapis.com'); + assert(stub.called); + stub.restore(); + }); + } + it('sets apiEndpoint according to universe domain camelCase', () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + {universeDomain: 'example.com'}, + ); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + it('sets apiEndpoint according to universe domain snakeCase', 
() => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + {universe_domain: 'example.com'}, + ); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + }); + + if (typeof process === 'object' && 'env' in process) { + describe('GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable', () => { + it('sets apiEndpoint from environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(); + const servicePath = client.apiEndpoint; + assert.strictEqual(servicePath, 'bigquerystorage.example.com'); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + + it('value configured in code has priority over environment variable', () => { + const saved = process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = 'example.com'; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + {universeDomain: 'configured.example.com'}, + ); + const servicePath = client.apiEndpoint; + assert.strictEqual( + servicePath, + 'bigquerystorage.configured.example.com', + ); + if (saved) { + process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; + } else { + delete process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN']; + } + }); + }); + } + it('does not allow setting both universeDomain and universe_domain', () => { + assert.throws(() => { + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + {universe_domain: 'example.com', universeDomain: 'example.net'}, + ); + }); + }); + + it('has port', () => { + const port = + metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient + .port; + assert(port); + assert(typeof port === 'number'); + 
}); + + it('should create a client with no option', () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + fallback: true, + }, + ); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + assert.strictEqual(client.metastorePartitionServiceStub, undefined); + await client.initialize(); + assert(client.metastorePartitionServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + client.initialize().catch(err => { + throw err; + }); + assert(client.metastorePartitionServiceStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); + }); + + it('has close method for the non-initialized client', done => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + assert.strictEqual(client.metastorePartitionServiceStub, undefined); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + 
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('batchCreateMetastorePartitions', () => { + it('invokes batchCreateMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse(), + ); + client.innerApiCalls.batchCreateMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchCreateMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsResponse(), + ); + client.innerApiCalls.batchCreateMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchCreateMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta.IBatchCreateMetastorePartitionsResponse | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchCreateMetastorePartitions = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects( + client.batchCreateMetastorePartitions(request), + expectedError, + ); + const actualRequest = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchCreateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchCreateMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchCreateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close().catch(err => { + throw err; + }); + await assert.rejects( + client.batchCreateMetastorePartitions(request), + expectedError, + ); + }); + }); + + describe('batchDeleteMetastorePartitions', () => { + it('invokes batchDeleteMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.innerApiCalls.batchDeleteMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchDeleteMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.innerApiCalls.batchDeleteMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchDeleteMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchDeleteMetastorePartitions = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects( + client.batchDeleteMetastorePartitions(request), + expectedError, + ); + const actualRequest = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchDeleteMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchDeleteMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchDeleteMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close().catch(err => { + throw err; + }); + await assert.rejects( + client.batchDeleteMetastorePartitions(request), + expectedError, + ); + }); + }); + + describe('batchUpdateMetastorePartitions', () => { + it('invokes batchUpdateMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse(), + ); + client.innerApiCalls.batchUpdateMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.batchUpdateMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsResponse(), + ); + client.innerApiCalls.batchUpdateMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.batchUpdateMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta.IBatchUpdateMetastorePartitionsResponse | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedError = new Error('expected'); + client.innerApiCalls.batchUpdateMetastorePartitions = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects( + client.batchUpdateMetastorePartitions(request), + expectedError, + ); + const actualRequest = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.batchUpdateMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes batchUpdateMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.BatchUpdateMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close().catch(err => { + throw err; + }); + await assert.rejects( + client.batchUpdateMetastorePartitions(request), + expectedError, + ); + }); + }); + + describe('listMetastorePartitions', () => { + it('invokes listMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(), + ); + 
const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse(), + ); + client.innerApiCalls.listMetastorePartitions = + stubSimpleCall(expectedResponse); + const [response] = await client.listMetastorePartitions(request); + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions without error using callback', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsResponse(), + ); + client.innerApiCalls.listMetastorePartitions = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMetastorePartitions( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.storage.v1beta.IListMetastorePartitionsResponse | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; + const expectedError = new Error('expected'); + client.innerApiCalls.listMetastorePartitions = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects( + client.listMetastorePartitions(request), + expectedError, + ); + const actualRequest = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[0]; + assert.deepStrictEqual(actualRequest, request); + const actualHeaderRequestParams = ( + client.innerApiCalls.listMetastorePartitions as SinonStub + ).getCall(0).args[1].otherArgs.headers['x-goog-request-params']; + assert(actualHeaderRequestParams.includes(expectedHeaderRequestParams)); + }); + + it('invokes listMetastorePartitions with closed client', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest(), + ); + const defaultValue1 = getTypeDefaultValue( + '.google.cloud.bigquery.storage.v1beta.ListMetastorePartitionsRequest', + ['parent'], + ); + request.parent = defaultValue1; + const expectedError = new Error('The client has already been closed.'); + client.close().catch(err => { + throw err; + }); + await assert.rejects( + client.listMetastorePartitions(request), + expectedError, + ); + }); + }); + + describe('streamMetastorePartitions', () => { + it('invokes streamMetastorePartitions without error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest(), + ); + + const expectedResponse = generateSampleMessage( 
+ new protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse(), + ); + client.innerApiCalls.streamMetastorePartitions = + stubBidiStreamingCall(expectedResponse); + const stream = client.streamMetastorePartitions(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse, + ) => { + resolve(response); + }, + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.streamMetastorePartitions as SinonStub) + .getCall(0) + .calledWith(null), + ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request, + ); + }); + + it('invokes streamMetastorePartitions with error', async () => { + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsRequest(), + ); + const expectedError = new Error('expected'); + client.innerApiCalls.streamMetastorePartitions = stubBidiStreamingCall( + undefined, + expectedError, + ); + const stream = client.streamMetastorePartitions(); + const promise = new Promise((resolve, reject) => { + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.storage.v1beta.StreamMetastorePartitionsResponse, + ) => { + resolve(response); + }, + ); + stream.on('error', (err: Error) => { + reject(err); + }); + stream.write(request); + stream.end(); + }); + await assert.rejects(promise, expectedError); + assert( + (client.innerApiCalls.streamMetastorePartitions as SinonStub) + .getCall(0) + .calledWith(null), 
+ ); + assert.deepStrictEqual( + ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) + .args[0], + request, + ); + }); + }); + + describe('Path templates', () => { + describe('readStream', async () => { + const fakePath = '/rendered/path/readStream'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + session: 'sessionValue', + stream: 'streamValue', + }; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + client.pathTemplates.readStreamPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.readStreamPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('readStreamPath', () => { + const result = client.readStreamPath( + 'projectValue', + 'locationValue', + 'sessionValue', + 'streamValue', + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.readStreamPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters), + ); + }); + + it('matchProjectFromReadStreamName', () => { + const result = client.matchProjectFromReadStreamName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath), + ); + }); + + it('matchLocationFromReadStreamName', () => { + const result = client.matchLocationFromReadStreamName(fakePath); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath), + ); + }); + + it('matchSessionFromReadStreamName', () => { + const result = client.matchSessionFromReadStreamName(fakePath); + assert.strictEqual(result, 'sessionValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + 
.calledWith(fakePath), + ); + }); + + it('matchStreamFromReadStreamName', () => { + const result = client.matchStreamFromReadStreamName(fakePath); + assert.strictEqual(result, 'streamValue'); + assert( + (client.pathTemplates.readStreamPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath), + ); + }); + }); + + describe('table', async () => { + const fakePath = '/rendered/path/table'; + const expectedParameters = { + project: 'projectValue', + dataset: 'datasetValue', + table: 'tableValue', + }; + const client = + new metastorepartitionserviceModule.v1beta.MetastorePartitionServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }, + ); + await client.initialize(); + client.pathTemplates.tablePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.tablePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('tablePath', () => { + const result = client.tablePath( + 'projectValue', + 'datasetValue', + 'tableValue', + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.tablePathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters), + ); + }); + + it('matchProjectFromTableName', () => { + const result = client.matchProjectFromTableName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath), + ); + }); + + it('matchDatasetFromTableName', () => { + const result = client.matchDatasetFromTableName(fakePath); + assert.strictEqual(result, 'datasetValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath), + ); + }); + + it('matchTableFromTableName', () => { + const result = client.matchTableFromTableName(fakePath); + assert.strictEqual(result, 'tableValue'); + assert( + (client.pathTemplates.tablePathTemplate.match as SinonStub) + .getCall(-1) 
+ .calledWith(fakePath), + ); + }); + }); + }); +}); From 16d00c2f25674707b8313ff5ec990b9b08880243 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 27 May 2025 10:27:40 -0400 Subject: [PATCH 305/333] fix: limit protobufjs version to one that breaks (#567) --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 7813816dc24..4eb7ada5f2a 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -35,7 +35,7 @@ "google-gax": "^5.0.1-rc.0" }, "peerDependencies": { - "protobufjs": "^7.2.4" + "protobufjs": "^7.2.4 - 7.5.0" }, "devDependencies": { "@google-cloud/bigquery": "^8.0.0", From 8f7a702d8e6d5aeb169309aa6be0b15172f578cf Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 27 May 2025 11:03:12 -0400 Subject: [PATCH 306/333] test: increase timeout for BQ Storage Read test (#566) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/system-test/reader_client_test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts index 063930bae13..c8e242b033f 100644 --- a/handwritten/bigquery-storage/system-test/reader_client_test.ts +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -564,7 +564,7 @@ describe('reader.ReaderClient', () => { } finally { client.close(); } - }).timeout(30 * 1000); + }).timeout(60 * 1000); }); describe('Error Scenarios', () => { From 95fa3b82ca5cf41192833baba09a2670af3c9e11 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Tue, 27 May 2025 16:33:01 -0400 Subject: [PATCH 307/333] feat: support precise dates and microsecond resolution (#569) --- handwritten/bigquery-storage/package.json | 3 +- .../src/managedwriter/encoder.ts | 38 +++++++++++++------ .../system-test/managed_writer_client_test.ts | 21 +++++----- 3 files changed, 39 insertions(+), 23 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 4eb7ada5f2a..599a2ca41b5 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -28,6 +28,7 @@ }, "dependencies": { "@google-cloud/paginator": "^6.0.0", + "@google-cloud/precise-date": "^5.0.0", "apache-arrow": "^19.0.1", "core-js": "^3.41.0", "extend": "^3.0.2", @@ -66,4 +67,4 @@ "engines": { "node": ">=18" } -} \ No newline at end of file +} diff --git a/handwritten/bigquery-storage/src/managedwriter/encoder.ts b/handwritten/bigquery-storage/src/managedwriter/encoder.ts index 3963e335fb8..f3172339eda 100644 --- a/handwritten/bigquery-storage/src/managedwriter/encoder.ts +++ b/handwritten/bigquery-storage/src/managedwriter/encoder.ts @@ -21,6 +21,7 @@ import { } from '../adapt/proto'; import * as extend from 'extend'; 
import {JSONObject, JSONValue} from './json_writer'; +import {PreciseDate} from '@google-cloud/precise-date'; type IDescriptorProto = protos.google.protobuf.IDescriptorProto; type DescriptorProto = protos.google.protobuf.DescriptorProto; @@ -127,18 +128,8 @@ export class JSONEncoder { if (!pfield) { return undefined; } - if (value instanceof Date) { - switch (pfield.type) { - case 'int32': // DATE - // The value is the number of days since the Unix epoch (1970-01-01) - return value.getTime() / (1000 * 60 * 60 * 24); - case 'int64': // TIMESTAMP - // The value is given in microseconds since the Unix epoch (1970-01-01) - return value.getTime() * 1000; - case 'string': // DATETIME - return value.toJSON().replace(/^(.*)T(.*)Z$/, '$1 $2'); - } - return undefined; + if (value instanceof Date || value instanceof PreciseDate) { + return this.encodeDateValue(pfield.type, value); } // NUMERIC and BIGNUMERIC integer if (typeof value === 'number' || typeof value === 'bigint') { @@ -168,6 +159,29 @@ export class JSONEncoder { return undefined; } + private encodeDateValue( + fieldType: string, + value: Date | PreciseDate, + ): JSONValue | undefined { + switch (fieldType) { + case 'int32': // DATE + // The value is the number of days since the Unix epoch (1970-01-01) + return value.getTime() / (1000 * 60 * 60 * 24); + case 'int64': { + // TIMESTAMP + let microseconds = 0; + if (value instanceof PreciseDate) { + microseconds = value.getMicroseconds(); + } + // The value is given in microseconds since the Unix epoch (1970-01-01) + return value.getTime() * 1000 + microseconds; + } + case 'string': // DATETIME + return value.toJSON().replace(/^(.*)T(.*)Z$/, '$1 $2'); + } + return undefined; + } + private getSubType(key: string, ptype: protobuf.Type): protobuf.Type { const pfield = ptype.fields[key]; if (!pfield) { diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index 
aa3c8b07a0a..dc355b5dfe1 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -27,6 +27,7 @@ import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; +import {PreciseDate} from '@google-cloud/precise-date'; const pkg = JSON.parse( readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8'), @@ -500,12 +501,12 @@ describe('managedwriter.WriterClient', () => { row_num: 1, customer_birthday: new Date('1815-12-10'), customer_metadata: { - customer_created_at: new Date('2022-01-09T03:49:46.564Z'), - customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + customer_created_at: new Date('2022-01-09T03:49:46.564000Z'), + customer_updated_at: new Date('2023-01-09T03:49:46.564000Z'), }, customer_last_purchase_dates: [ - new Date('2022-01-09T03:49:46.564Z'), - new Date('2023-01-09T03:49:46.564Z'), + new PreciseDate('2022-01-09T03:49:46.564321Z'), + new PreciseDate('2023-01-09T03:49:46.564321Z'), ], }; @@ -515,12 +516,12 @@ describe('managedwriter.WriterClient', () => { row_num: 2, customer_birthday: new Date('1912-07-23'), customer_metadata: { - customer_created_at: new Date('2022-01-09T03:49:46.564Z'), - customer_updated_at: new Date('2023-01-09T03:49:46.564Z'), + customer_created_at: new Date('2022-01-09T03:49:46.564000Z'), + customer_updated_at: new Date('2023-01-09T03:49:46.564000Z'), }, customer_last_purchase_dates: [ - new Date('2022-01-09T03:49:46.564Z'), - new Date('2023-01-09T03:49:46.564Z'), + new PreciseDate('2022-01-09T03:49:46.564321Z'), + new PreciseDate('2023-01-09T03:49:46.564321Z'), ], }; @@ -537,7 +538,7 @@ describe('managedwriter.WriterClient', () => { customer_created_at: '2022-01-09 03:49:46.564', customer_updated_at: '1673236186564000', }, - 
customer_last_purchase_dates: ['1641700186564000', '1673236186564000'], + customer_last_purchase_dates: ['1641700186564321', '1673236186564321'], }); const encodedRow2 = encoded[1]; @@ -550,7 +551,7 @@ describe('managedwriter.WriterClient', () => { customer_created_at: '2022-01-09 03:49:46.564', customer_updated_at: '1673236186564000', }, - customer_last_purchase_dates: ['1641700186564000', '1673236186564000'], + customer_last_purchase_dates: ['1641700186564321', '1673236186564321'], }); }); From 88b9ae8ebf244454947ba51192ded2032ad3c632 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Fri, 6 Jun 2025 16:53:09 -0400 Subject: [PATCH 308/333] fix(managedwriter): improve checks before closing connection (#568) --- .../bigquery-storage/src/managedwriter/stream_connection.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts index e7e7dd1894d..fea550d2321 100644 --- a/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts +++ b/handwritten/bigquery-storage/src/managedwriter/stream_connection.ts @@ -374,7 +374,7 @@ export class StreamConnection extends EventEmitter { * Close the bi-directional stream connection. 
*/ close() { - if (!this._connection) { + if (this.isConnectionClosed() || !this._connection) { return; } this._connection.end(); From 24ef5c228b6ea79179a6a950ab68682e6b41bee9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 11:03:22 -0400 Subject: [PATCH 309/333] chore(main): release 5.1.0 (#552) --- handwritten/bigquery-storage/CHANGELOG.md | 18 ++++++++++++++++++ handwritten/bigquery-storage/package.json | 2 +- ...adata.google.cloud.bigquery.storage.v1.json | 2 +- ...adata_google.cloud.bigquery.storage.v1.json | 2 +- ..._google.cloud.bigquery.storage.v1alpha.json | 2 +- ...a_google.cloud.bigquery.storage.v1beta.json | 2 +- ....google.cloud.bigquery.storage.v1beta1.json | 2 +- ..._google.cloud.bigquery.storage.v1beta1.json | 2 +- 8 files changed, 25 insertions(+), 7 deletions(-) diff --git a/handwritten/bigquery-storage/CHANGELOG.md b/handwritten/bigquery-storage/CHANGELOG.md index 9a35776e987..5bf3dce2c6d 100644 --- a/handwritten/bigquery-storage/CHANGELOG.md +++ b/handwritten/bigquery-storage/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [5.1.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v5.0.0...v5.1.0) (2025-06-06) + + +### Features + +* Add protobufjs 2023 edition support ([21122f5](https://github.com/googleapis/nodejs-bigquery-storage/commit/21122f5901fd5b6670c4a3271045a9b3828b7f6b)) +* Increased the number of partitions can be written in a single request ([21122f5](https://github.com/googleapis/nodejs-bigquery-storage/commit/21122f5901fd5b6670c4a3271045a9b3828b7f6b)) +* Selective gapic support for typescript generation ([21122f5](https://github.com/googleapis/nodejs-bigquery-storage/commit/21122f5901fd5b6670c4a3271045a9b3828b7f6b)) +* Support precise dates and microsecond resolution ([#569](https://github.com/googleapis/nodejs-bigquery-storage/issues/569)) 
([2ae69a8](https://github.com/googleapis/nodejs-bigquery-storage/commit/2ae69a89309a538e413bf15639c0b9b11ee4cf47)) + + +### Bug Fixes + +* Catch dangling promises ([21122f5](https://github.com/googleapis/nodejs-bigquery-storage/commit/21122f5901fd5b6670c4a3271045a9b3828b7f6b)) +* Limit protobufjs version to one that breaks ([#567](https://github.com/googleapis/nodejs-bigquery-storage/issues/567)) ([61f92d4](https://github.com/googleapis/nodejs-bigquery-storage/commit/61f92d488400318f4669a42a88e1056b864143d1)) +* **managedwriter:** Improve checks before closing connection ([#568](https://github.com/googleapis/nodejs-bigquery-storage/issues/568)) ([338f8bb](https://github.com/googleapis/nodejs-bigquery-storage/commit/338f8bbf6ba0aec47c82888a8ec3cafdc30950e9)) +* Remove `location` from http annotations in all of the service requests ([21122f5](https://github.com/googleapis/nodejs-bigquery-storage/commit/21122f5901fd5b6670c4a3271045a9b3828b7f6b)) + ## [5.0.0](https://github.com/googleapis/nodejs-bigquery-storage/compare/v4.11.0...v5.0.0) (2025-05-08) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 599a2ca41b5..c54ac91588d 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -1,6 +1,6 @@ { "name": "@google-cloud/bigquery-storage", - "version": "5.0.0", + "version": "5.1.0", "description": "Client for the BigQuery Storage API", "repository": "googleapis/nodejs-bigquery-storage", "license": "Apache-2.0", diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json index 9f15ae55678..a02b7e548cb 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata.google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json index e13091a5cea..0f17af129f2 100644 --- a/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/handwritten/bigquery-storage/samples/generated/v1/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index 5a822918abf..3fedb87b58a 100644 --- a/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/handwritten/bigquery-storage/samples/generated/v1alpha/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json index 1eb43f38cec..12b215c9741 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json +++ 
b/handwritten/bigquery-storage/samples/generated/v1beta/snippet_metadata_google.cloud.bigquery.storage.v1beta.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json index 4dd83b7176f..8ced04685a1 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata.google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { diff --git a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json index 4dd83b7176f..8ced04685a1 100644 --- a/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json +++ b/handwritten/bigquery-storage/samples/generated/v1beta1/snippet_metadata_google.cloud.bigquery.storage.v1beta1.json @@ -1,7 +1,7 @@ { "clientLibrary": { "name": "nodejs-storage", - "version": "5.0.0", + "version": "5.1.0", "language": "TYPESCRIPT", "apis": [ { From 746d0e59f18e92e9a20cb55f3089a961bc49d30c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 22 Jul 2025 11:22:27 -0400 Subject: [PATCH 310/333] chore: add node 24 in node ci test (#581) Source-Link: https://github.com/googleapis/synthtool/commit/1218bc231201438192c962136303b95f0a11a4f5 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:66c44f0ad8f6caaa4eb3fbe74f8c2b4de5a97c2b930cee069e712c447723ba95 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../bigquery-storage/protos/protos.d.ts | 522 ++++- handwritten/bigquery-storage/protos/protos.js | 2061 +++++++++++++++-- .../bigquery-storage/protos/protos.json | 215 +- 4 files changed, 2580 insertions(+), 222 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 60443342360..2a0311b85ab 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee -# created: 2025-04-08T17:33:08.498793944Z + digest: sha256:66c44f0ad8f6caaa4eb3fbe74f8c2b4de5a97c2b930cee069e712c447723ba95 +# created: 2025-07-08T20:57:17.642848562Z diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 0e82e207b13..0f9ad177a15 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -11569,6 +11569,7 @@ export namespace google { /** Edition enum. */ enum Edition { EDITION_UNKNOWN = 0, + EDITION_LEGACY = 900, EDITION_PROTO2 = 998, EDITION_PROTO3 = 999, EDITION_2023 = 1000, @@ -11599,6 +11600,9 @@ export namespace google { /** FileDescriptorProto weakDependency */ weakDependency?: (number[]|null); + /** FileDescriptorProto optionDependency */ + optionDependency?: (string[]|null); + /** FileDescriptorProto messageType */ messageType?: (google.protobuf.IDescriptorProto[]|null); @@ -11648,6 +11652,9 @@ export namespace google { /** FileDescriptorProto weakDependency. 
*/ public weakDependency: number[]; + /** FileDescriptorProto optionDependency. */ + public optionDependency: string[]; + /** FileDescriptorProto messageType. */ public messageType: google.protobuf.IDescriptorProto[]; @@ -11782,6 +11789,9 @@ export namespace google { /** DescriptorProto reservedName */ reservedName?: (string[]|null); + + /** DescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents a DescriptorProto. */ @@ -11823,6 +11833,9 @@ export namespace google { /** DescriptorProto reservedName. */ public reservedName: string[]; + /** DescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new DescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -12670,6 +12683,9 @@ export namespace google { /** EnumDescriptorProto reservedName */ reservedName?: (string[]|null); + + /** EnumDescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents an EnumDescriptorProto. */ @@ -12696,6 +12712,9 @@ export namespace google { /** EnumDescriptorProto reservedName. */ public reservedName: string[]; + /** EnumDescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new EnumDescriptorProto instance using the specified properties. 
* @param [properties] Properties to set @@ -13630,6 +13649,9 @@ export namespace google { /** FieldOptions features */ features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -13688,6 +13710,9 @@ export namespace google { /** FieldOptions features. */ public features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -13908,6 +13933,121 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + /** Properties of a FeatureSupport. */ + interface IFeatureSupport { + + /** FeatureSupport editionIntroduced */ + editionIntroduced?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport editionDeprecated */ + editionDeprecated?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport deprecationWarning */ + deprecationWarning?: (string|null); + + /** FeatureSupport editionRemoved */ + editionRemoved?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + } + + /** Represents a FeatureSupport. */ + class FeatureSupport implements IFeatureSupport { + + /** + * Constructs a new FeatureSupport. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FieldOptions.IFeatureSupport); + + /** FeatureSupport editionIntroduced. */ + public editionIntroduced: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport editionDeprecated. */ + public editionDeprecated: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport deprecationWarning. 
*/ + public deprecationWarning: string; + + /** FeatureSupport editionRemoved. */ + public editionRemoved: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** + * Creates a new FeatureSupport instance using the specified properties. + * @param [properties] Properties to set + * @returns FeatureSupport instance + */ + public static create(properties?: google.protobuf.FieldOptions.IFeatureSupport): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Verifies a FeatureSupport message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSupport + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. + * @param message FeatureSupport + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions.FeatureSupport, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSupport to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSupport + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } /** Properties of an OneofOptions. 
*/ @@ -14146,6 +14286,9 @@ export namespace google { /** EnumValueOptions debugRedact */ debugRedact?: (boolean|null); + /** EnumValueOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -14168,6 +14311,9 @@ export namespace google { /** EnumValueOptions debugRedact. */ public debugRedact: boolean; + /** EnumValueOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -14757,6 +14903,12 @@ export namespace google { /** FeatureSet jsonFormat */ jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); + + /** FeatureSet enforceNamingStyle */ + enforceNamingStyle?: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle|null); + + /** FeatureSet defaultSymbolVisibility */ + defaultSymbolVisibility?: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null); } /** Represents a FeatureSet. */ @@ -14786,6 +14938,12 @@ export namespace google { /** FeatureSet jsonFormat. */ public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); + /** FeatureSet enforceNamingStyle. */ + public enforceNamingStyle: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle); + + /** FeatureSet defaultSymbolVisibility. 
*/ + public defaultSymbolVisibility: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility); + /** * Creates a new FeatureSet instance using the specified properties. * @param [properties] Properties to set @@ -14908,6 +15066,116 @@ export namespace google { ALLOW = 1, LEGACY_BEST_EFFORT = 2 } + + /** EnforceNamingStyle enum. */ + enum EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2 + } + + /** Properties of a VisibilityFeature. */ + interface IVisibilityFeature { + } + + /** Represents a VisibilityFeature. */ + class VisibilityFeature implements IVisibilityFeature { + + /** + * Constructs a new VisibilityFeature. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FeatureSet.IVisibilityFeature); + + /** + * Creates a new VisibilityFeature instance using the specified properties. + * @param [properties] Properties to set + * @returns VisibilityFeature instance + */ + public static create(properties?: google.protobuf.FeatureSet.IVisibilityFeature): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. 
+ * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Verifies a VisibilityFeature message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns VisibilityFeature + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. 
+ * @param message VisibilityFeature + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSet.VisibilityFeature, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this VisibilityFeature to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for VisibilityFeature + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace VisibilityFeature { + + /** DefaultSymbolVisibility enum. */ + enum DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + EXPORT_ALL = 1, + EXPORT_TOP_LEVEL = 2, + LOCAL_ALL = 3, + STRICT = 4 + } + } } /** Properties of a FeatureSetDefaults. */ @@ -15027,8 +15295,11 @@ export namespace google { /** FeatureSetEditionDefault edition */ edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - /** FeatureSetEditionDefault features */ - features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures */ + overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures */ + fixedFeatures?: (google.protobuf.IFeatureSet|null); } /** Represents a FeatureSetEditionDefault. */ @@ -15043,8 +15314,11 @@ export namespace google { /** FeatureSetEditionDefault edition. */ public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - /** FeatureSetEditionDefault features. */ - public features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures. */ + public overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures. 
*/ + public fixedFeatures?: (google.protobuf.IFeatureSet|null); /** * Creates a new FeatureSetEditionDefault instance using the specified properties. @@ -15577,6 +15851,13 @@ export namespace google { } } + /** SymbolVisibility enum. */ + enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2 + } + /** Properties of a Duration. */ interface IDuration { @@ -17319,6 +17600,9 @@ export namespace google { /** CommonLanguageSettings destinations */ destinations?: (google.api.ClientLibraryDestination[]|null); + + /** CommonLanguageSettings selectiveGapicGeneration */ + selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); } /** Represents a CommonLanguageSettings. */ @@ -17336,6 +17620,9 @@ export namespace google { /** CommonLanguageSettings destinations. */ public destinations: google.api.ClientLibraryDestination[]; + /** CommonLanguageSettings selectiveGapicGeneration. */ + public selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @param [properties] Properties to set @@ -18036,6 +18323,9 @@ export namespace google { /** PythonSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** PythonSettings experimentalFeatures */ + experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); } /** Represents a PythonSettings. */ @@ -18050,6 +18340,9 @@ export namespace google { /** PythonSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** PythonSettings experimentalFeatures. */ + public experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); + /** * Creates a new PythonSettings instance using the specified properties. 
* @param [properties] Properties to set @@ -18128,6 +18421,118 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace PythonSettings { + + /** Properties of an ExperimentalFeatures. */ + interface IExperimentalFeatures { + + /** ExperimentalFeatures restAsyncIoEnabled */ + restAsyncIoEnabled?: (boolean|null); + + /** ExperimentalFeatures protobufPythonicTypesEnabled */ + protobufPythonicTypesEnabled?: (boolean|null); + + /** ExperimentalFeatures unversionedPackageDisabled */ + unversionedPackageDisabled?: (boolean|null); + } + + /** Represents an ExperimentalFeatures. */ + class ExperimentalFeatures implements IExperimentalFeatures { + + /** + * Constructs a new ExperimentalFeatures. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.PythonSettings.IExperimentalFeatures); + + /** ExperimentalFeatures restAsyncIoEnabled. */ + public restAsyncIoEnabled: boolean; + + /** ExperimentalFeatures protobufPythonicTypesEnabled. */ + public protobufPythonicTypesEnabled: boolean; + + /** ExperimentalFeatures unversionedPackageDisabled. */ + public unversionedPackageDisabled: boolean; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @param [properties] Properties to set + * @returns ExperimentalFeatures instance + */ + public static create(properties?: google.api.PythonSettings.IExperimentalFeatures): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. 
Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Verifies an ExperimentalFeatures message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExperimentalFeatures + */ + public static fromObject(object: { [k: string]: any }): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. 
+ * @param message ExperimentalFeatures + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.PythonSettings.ExperimentalFeatures, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExperimentalFeatures + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Properties of a NodeSettings. */ interface INodeSettings { @@ -18454,6 +18859,9 @@ export namespace google { /** GoSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** GoSettings renamedServices */ + renamedServices?: ({ [k: string]: string }|null); } /** Represents a GoSettings. */ @@ -18468,6 +18876,9 @@ export namespace google { /** GoSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** GoSettings renamedServices. */ + public renamedServices: { [k: string]: string }; + /** * Creates a new GoSettings instance using the specified properties. * @param [properties] Properties to set @@ -18792,6 +19203,109 @@ export namespace google { PACKAGE_MANAGER = 20 } + /** Properties of a SelectiveGapicGeneration. */ + interface ISelectiveGapicGeneration { + + /** SelectiveGapicGeneration methods */ + methods?: (string[]|null); + + /** SelectiveGapicGeneration generateOmittedAsInternal */ + generateOmittedAsInternal?: (boolean|null); + } + + /** Represents a SelectiveGapicGeneration. */ + class SelectiveGapicGeneration implements ISelectiveGapicGeneration { + + /** + * Constructs a new SelectiveGapicGeneration. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ISelectiveGapicGeneration); + + /** SelectiveGapicGeneration methods. 
*/ + public methods: string[]; + + /** SelectiveGapicGeneration generateOmittedAsInternal. */ + public generateOmittedAsInternal: boolean; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. + * @param [properties] Properties to set + * @returns SelectiveGapicGeneration instance + */ + public static create(properties?: google.api.ISelectiveGapicGeneration): google.api.SelectiveGapicGeneration; + + /** + * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.SelectiveGapicGeneration; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.SelectiveGapicGeneration; + + /** + * Verifies a SelectiveGapicGeneration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SelectiveGapicGeneration + */ + public static fromObject(object: { [k: string]: any }): google.api.SelectiveGapicGeneration; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. + * @param message SelectiveGapicGeneration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.SelectiveGapicGeneration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SelectiveGapicGeneration to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** LaunchStage enum. 
*/ enum LaunchStage { LAUNCH_STAGE_UNSPECIFIED = 0, diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index dd704459344..e5b18720ae2 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -28198,6 +28198,7 @@ * @name google.protobuf.Edition * @enum {number} * @property {number} EDITION_UNKNOWN=0 EDITION_UNKNOWN value + * @property {number} EDITION_LEGACY=900 EDITION_LEGACY value * @property {number} EDITION_PROTO2=998 EDITION_PROTO2 value * @property {number} EDITION_PROTO3=999 EDITION_PROTO3 value * @property {number} EDITION_2023=1000 EDITION_2023 value @@ -28212,6 +28213,7 @@ protobuf.Edition = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[0] = "EDITION_UNKNOWN"] = 0; + values[valuesById[900] = "EDITION_LEGACY"] = 900; values[valuesById[998] = "EDITION_PROTO2"] = 998; values[valuesById[999] = "EDITION_PROTO3"] = 999; values[valuesById[1000] = "EDITION_2023"] = 1000; @@ -28236,6 +28238,7 @@ * @property {Array.|null} [dependency] FileDescriptorProto dependency * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [optionDependency] FileDescriptorProto optionDependency * @property {Array.|null} [messageType] FileDescriptorProto messageType * @property {Array.|null} [enumType] FileDescriptorProto enumType * @property {Array.|null} [service] FileDescriptorProto service @@ -28258,6 +28261,7 @@ this.dependency = []; this.publicDependency = []; this.weakDependency = []; + this.optionDependency = []; this.messageType = []; this.enumType = []; this.service = []; @@ -28308,6 +28312,14 @@ */ FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + /** + * FileDescriptorProto optionDependency. 
+ * @member {Array.} optionDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.optionDependency = $util.emptyArray; + /** * FileDescriptorProto messageType. * @member {Array.} messageType @@ -28429,6 +28441,9 @@ writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 14, wireType 0 =*/112).int32(message.edition); + if (message.optionDependency != null && message.optionDependency.length) + for (var i = 0; i < message.optionDependency.length; ++i) + writer.uint32(/* id 15, wireType 2 =*/122).string(message.optionDependency[i]); return writer; }; @@ -28501,6 +28516,12 @@ message.weakDependency.push(reader.int32()); break; } + case 15: { + if (!(message.optionDependency && message.optionDependency.length)) + message.optionDependency = []; + message.optionDependency.push(reader.string()); + break; + } case 4: { if (!(message.messageType && message.messageType.length)) message.messageType = []; @@ -28603,6 +28624,13 @@ if (!$util.isInteger(message.weakDependency[i])) return "weakDependency: integer[] expected"; } + if (message.optionDependency != null && message.hasOwnProperty("optionDependency")) { + if (!Array.isArray(message.optionDependency)) + return "optionDependency: array expected"; + for (var i = 0; i < message.optionDependency.length; ++i) + if (!$util.isString(message.optionDependency[i])) + return "optionDependency: string[] expected"; + } if (message.messageType != null && message.hasOwnProperty("messageType")) { if (!Array.isArray(message.messageType)) return "messageType: array expected"; @@ -28657,6 +28685,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -28709,6 +28738,13 @@ for (var i = 0; i < object.weakDependency.length; ++i) message.weakDependency[i] = object.weakDependency[i] | 0; } + if (object.optionDependency) { + if 
(!Array.isArray(object.optionDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.optionDependency: array expected"); + message.optionDependency = []; + for (var i = 0; i < object.optionDependency.length; ++i) + message.optionDependency[i] = String(object.optionDependency[i]); + } if (object.messageType) { if (!Array.isArray(object.messageType)) throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); @@ -28772,6 +28808,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -28837,6 +28877,7 @@ object.extension = []; object.publicDependency = []; object.weakDependency = []; + object.optionDependency = []; } if (options.defaults) { object.name = ""; @@ -28893,6 +28934,11 @@ object.syntax = message.syntax; if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.optionDependency && message.optionDependency.length) { + object.optionDependency = []; + for (var j = 0; j < message.optionDependency.length; ++j) + object.optionDependency[j] = message.optionDependency[j]; + } return object; }; @@ -28941,6 +28987,7 @@ * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options * @property {Array.|null} [reservedRange] DescriptorProto reservedRange * @property {Array.|null} [reservedName] DescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] DescriptorProto visibility */ /** @@ -29046,6 +29093,14 @@ */ DescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * DescriptorProto visibility. 
+ * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.visibility = 0; + /** * Creates a new DescriptorProto instance using the specified properties. * @function create @@ -29098,6 +29153,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.visibility); return writer; }; @@ -29190,6 +29247,10 @@ message.reservedName.push(reader.string()); break; } + case 11: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -29303,6 +29364,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -29402,6 +29472,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -29431,6 +29521,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? 
"VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -29476,6 +29567,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -31520,6 +31613,7 @@ * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] EnumDescriptorProto visibility */ /** @@ -31580,6 +31674,14 @@ */ EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * EnumDescriptorProto visibility. + * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.visibility = 0; + /** * Creates a new EnumDescriptorProto instance using the specified properties. 
* @function create @@ -31617,6 +31719,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.visibility); return writer; }; @@ -31679,6 +31783,10 @@ message.reservedName.push(reader.string()); break; } + case 6: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -31747,6 +31855,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -31796,6 +31913,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -31820,6 +31957,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -31840,6 +31978,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? 
$root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -34158,6 +34298,7 @@ * @property {Array.|null} [targets] FieldOptions targets * @property {Array.|null} [editionDefaults] FieldOptions editionDefaults * @property {google.protobuf.IFeatureSet|null} [features] FieldOptions features + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] FieldOptions featureSupport * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {string|null} [".google.cloud.bigquery.storage.v1.columnName"] FieldOptions .google.cloud.bigquery.storage.v1.columnName * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior @@ -34279,6 +34420,14 @@ */ FieldOptions.prototype.features = null; + /** + * FieldOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.featureSupport = null; + /** * FieldOptions uninterpretedOption. 
* @member {Array.} uninterpretedOption @@ -34361,6 +34510,8 @@ $root.google.protobuf.FieldOptions.EditionDefault.encode(message.editionDefaults[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim(); if (message.features != null && Object.hasOwnProperty.call(message, "features")) $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -34464,6 +34615,10 @@ message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } + case 22: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -34603,6 +34758,11 @@ if (error) return "features." + error; } + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -34794,6 +34954,11 @@ throw TypeError(".google.protobuf.FieldOptions.features: object expected"); message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.FieldOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); @@ -34893,6 +35058,7 @@ object.debugRedact = false; object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; object.features = null; + object.featureSupport = null; object[".google.api.resourceReference"] = null; object[".google.cloud.bigquery.storage.v1.columnName"] = null; } @@ -34926,6 +35092,8 @@ } if (message.features != null && message.hasOwnProperty("features")) object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -35200,6 +35368,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -35241,6 +35410,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ 
-35340,122 +35513,604 @@ return EditionDefault; })(); - return FieldOptions; - })(); + FieldOptions.FeatureSupport = (function() { - protobuf.OneofOptions = (function() { + /** + * Properties of a FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @interface IFeatureSupport + * @property {google.protobuf.Edition|null} [editionIntroduced] FeatureSupport editionIntroduced + * @property {google.protobuf.Edition|null} [editionDeprecated] FeatureSupport editionDeprecated + * @property {string|null} [deprecationWarning] FeatureSupport deprecationWarning + * @property {google.protobuf.Edition|null} [editionRemoved] FeatureSupport editionRemoved + */ - /** - * Properties of an OneofOptions. - * @memberof google.protobuf - * @interface IOneofOptions - * @property {google.protobuf.IFeatureSet|null} [features] OneofOptions features - * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption - */ + /** + * Constructs a new FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @classdesc Represents a FeatureSupport. + * @implements IFeatureSupport + * @constructor + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + */ + function FeatureSupport(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } - /** - * Constructs a new OneofOptions. - * @memberof google.protobuf - * @classdesc Represents an OneofOptions. - * @implements IOneofOptions - * @constructor - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set - */ - function OneofOptions(properties) { - this.uninterpretedOption = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } + /** + * FeatureSupport editionIntroduced. 
+ * @member {google.protobuf.Edition} editionIntroduced + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionIntroduced = 0; - /** - * OneofOptions features. - * @member {google.protobuf.IFeatureSet|null|undefined} features - * @memberof google.protobuf.OneofOptions - * @instance - */ - OneofOptions.prototype.features = null; + /** + * FeatureSupport editionDeprecated. + * @member {google.protobuf.Edition} editionDeprecated + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionDeprecated = 0; - /** - * OneofOptions uninterpretedOption. - * @member {Array.} uninterpretedOption - * @memberof google.protobuf.OneofOptions - * @instance - */ - OneofOptions.prototype.uninterpretedOption = $util.emptyArray; + /** + * FeatureSupport deprecationWarning. + * @member {string} deprecationWarning + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.deprecationWarning = ""; - /** - * Creates a new OneofOptions instance using the specified properties. - * @function create - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions=} [properties] Properties to set - * @returns {google.protobuf.OneofOptions} OneofOptions instance - */ - OneofOptions.create = function create(properties) { - return new OneofOptions(properties); - }; + /** + * FeatureSupport editionRemoved. + * @member {google.protobuf.Edition} editionRemoved + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionRemoved = 0; - /** - * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
- * @function encode - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofOptions.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.features != null && Object.hasOwnProperty.call(message, "features")) - $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.uninterpretedOption != null && message.uninterpretedOption.length) - for (var i = 0; i < message.uninterpretedOption.length; ++i) - $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - return writer; - }; + /** + * Creates a new FeatureSupport instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport instance + */ + FeatureSupport.create = function create(properties) { + return new FeatureSupport(properties); + }; - /** - * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.OneofOptions - * @static - * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; + /** + * Encodes the specified FeatureSupport message. 
Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.editionIntroduced != null && Object.hasOwnProperty.call(message, "editionIntroduced")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.editionIntroduced); + if (message.editionDeprecated != null && Object.hasOwnProperty.call(message, "editionDeprecated")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.editionDeprecated); + if (message.deprecationWarning != null && Object.hasOwnProperty.call(message, "deprecationWarning")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.deprecationWarning); + if (message.editionRemoved != null && Object.hasOwnProperty.call(message, "editionRemoved")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.editionRemoved); + return writer; + }; - /** - * Decodes an OneofOptions message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.OneofOptions - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.OneofOptions} OneofOptions - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - OneofOptions.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - case 1: { - message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.editionIntroduced = reader.int32(); + break; + } + case 2: { + message.editionDeprecated = reader.int32(); + break; + } + case 3: { + message.deprecationWarning = reader.string(); + break; + } + case 4: { + message.editionRemoved = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); break; } - case 999: { - if (!(message.uninterpretedOption && message.uninterpretedOption.length)) + } + return message; + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSupport message. 
+ * @function verify + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSupport.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + switch (message.editionIntroduced) { + default: + return "editionIntroduced: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + switch (message.editionDeprecated) { + default: + return "editionDeprecated: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + if (!$util.isString(message.deprecationWarning)) + return "deprecationWarning: string expected"; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + switch (message.editionRemoved) { + default: + return "editionRemoved: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + return null; + }; + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + */ + FeatureSupport.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions.FeatureSupport) + return object; + var message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + switch (object.editionIntroduced) { + default: + if (typeof object.editionIntroduced === "number") { + message.editionIntroduced = object.editionIntroduced; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionIntroduced = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionIntroduced = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionIntroduced = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionIntroduced = 999; + break; + case "EDITION_2023": + case 1000: + message.editionIntroduced = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionIntroduced = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionIntroduced = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionIntroduced = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionIntroduced = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionIntroduced = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionIntroduced = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionIntroduced = 2147483647; + break; + } + switch (object.editionDeprecated) { + default: + if (typeof object.editionDeprecated === "number") { + message.editionDeprecated = object.editionDeprecated; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionDeprecated = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionDeprecated = 900; + break; + case 
"EDITION_PROTO2": + case 998: + message.editionDeprecated = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionDeprecated = 999; + break; + case "EDITION_2023": + case 1000: + message.editionDeprecated = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionDeprecated = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionDeprecated = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionDeprecated = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionDeprecated = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionDeprecated = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionDeprecated = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionDeprecated = 2147483647; + break; + } + if (object.deprecationWarning != null) + message.deprecationWarning = String(object.deprecationWarning); + switch (object.editionRemoved) { + default: + if (typeof object.editionRemoved === "number") { + message.editionRemoved = object.editionRemoved; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionRemoved = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionRemoved = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionRemoved = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionRemoved = 999; + break; + case "EDITION_2023": + case 1000: + message.editionRemoved = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionRemoved = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionRemoved = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionRemoved = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionRemoved = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionRemoved = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + 
message.editionRemoved = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionRemoved = 2147483647; + break; + } + return message; + }; + + /** + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.FeatureSupport} message FeatureSupport + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FeatureSupport.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.editionIntroduced = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.editionDeprecated = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.deprecationWarning = ""; + object.editionRemoved = options.enums === String ? "EDITION_UNKNOWN" : 0; + } + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + object.editionIntroduced = options.enums === String ? $root.google.protobuf.Edition[message.editionIntroduced] === undefined ? message.editionIntroduced : $root.google.protobuf.Edition[message.editionIntroduced] : message.editionIntroduced; + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + object.editionDeprecated = options.enums === String ? $root.google.protobuf.Edition[message.editionDeprecated] === undefined ? message.editionDeprecated : $root.google.protobuf.Edition[message.editionDeprecated] : message.editionDeprecated; + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + object.deprecationWarning = message.deprecationWarning; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + object.editionRemoved = options.enums === String ? $root.google.protobuf.Edition[message.editionRemoved] === undefined ? 
message.editionRemoved : $root.google.protobuf.Edition[message.editionRemoved] : message.editionRemoved; + return object; + }; + + /** + * Converts this FeatureSupport to JSON. + * @function toJSON + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + * @returns {Object.} JSON object + */ + FeatureSupport.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FeatureSupport + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FeatureSupport.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions.FeatureSupport"; + }; + + return FeatureSupport; + })(); + + return FieldOptions; + })(); + + protobuf.OneofOptions = (function() { + + /** + * Properties of an OneofOptions. + * @memberof google.protobuf + * @interface IOneofOptions + * @property {google.protobuf.IFeatureSet|null} [features] OneofOptions features + * @property {Array.|null} [uninterpretedOption] OneofOptions uninterpretedOption + */ + + /** + * Constructs a new OneofOptions. + * @memberof google.protobuf + * @classdesc Represents an OneofOptions. + * @implements IOneofOptions + * @constructor + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + */ + function OneofOptions(properties) { + this.uninterpretedOption = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * OneofOptions features. 
+ * @member {google.protobuf.IFeatureSet|null|undefined} features + * @memberof google.protobuf.OneofOptions + * @instance + */ + OneofOptions.prototype.features = null; + + /** + * OneofOptions uninterpretedOption. + * @member {Array.} uninterpretedOption + * @memberof google.protobuf.OneofOptions + * @instance + */ + OneofOptions.prototype.uninterpretedOption = $util.emptyArray; + + /** + * Creates a new OneofOptions instance using the specified properties. + * @function create + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions=} [properties] Properties to set + * @returns {google.protobuf.OneofOptions} OneofOptions instance + */ + OneofOptions.create = function create(properties) { + return new OneofOptions(properties); + }; + + /** + * Encodes the specified OneofOptions message. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. + * @function encode + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.uninterpretedOption != null && message.uninterpretedOption.length) + for (var i = 0; i < message.uninterpretedOption.length; ++i) + $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified OneofOptions message, length delimited. Does not implicitly {@link google.protobuf.OneofOptions.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.OneofOptions + * @static + * @param {google.protobuf.IOneofOptions} message OneofOptions message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + OneofOptions.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an OneofOptions message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.OneofOptions + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.OneofOptions} OneofOptions + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + OneofOptions.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.protobuf.OneofOptions(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 999: { + if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; message.uninterpretedOption.push($root.google.protobuf.UninterpretedOption.decode(reader, reader.uint32())); break; @@ -35932,6 +36587,7 @@ * @property {boolean|null} [deprecated] EnumValueOptions deprecated * @property {google.protobuf.IFeatureSet|null} [features] EnumValueOptions features * @property {boolean|null} [debugRedact] EnumValueOptions debugRedact + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] EnumValueOptions featureSupport * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption */ @@ -35975,6 +36631,14 @@ */ EnumValueOptions.prototype.debugRedact = false; + /** + * EnumValueOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.featureSupport = null; + /** * EnumValueOptions uninterpretedOption. 
* @member {Array.} uninterpretedOption @@ -36013,6 +36677,8 @@ $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.debugRedact); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -36064,6 +36730,10 @@ message.debugRedact = reader.bool(); break; } + case 4: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -36116,6 +36786,11 @@ if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) if (typeof message.debugRedact !== "boolean") return "debugRedact: boolean expected"; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -36149,6 +36824,11 @@ } if (object.debugRedact != null) message.debugRedact = Boolean(object.debugRedact); + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); @@ -36181,6 +36861,7 @@ object.deprecated = false; object.features = null; object.debugRedact = false; + object.featureSupport = null; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; @@ -36188,6 +36869,8 @@ object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) object.debugRedact = message.debugRedact; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -37627,6 +38310,8 @@ * @property {google.protobuf.FeatureSet.Utf8Validation|null} [utf8Validation] FeatureSet utf8Validation * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat + * @property 
{google.protobuf.FeatureSet.EnforceNamingStyle|null} [enforceNamingStyle] FeatureSet enforceNamingStyle + * @property {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null} [defaultSymbolVisibility] FeatureSet defaultSymbolVisibility */ /** @@ -37692,6 +38377,22 @@ */ FeatureSet.prototype.jsonFormat = 0; + /** + * FeatureSet enforceNamingStyle. + * @member {google.protobuf.FeatureSet.EnforceNamingStyle} enforceNamingStyle + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.enforceNamingStyle = 0; + + /** + * FeatureSet defaultSymbolVisibility. + * @member {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility} defaultSymbolVisibility + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.defaultSymbolVisibility = 0; + /** * Creates a new FeatureSet instance using the specified properties. * @function create @@ -37728,6 +38429,10 @@ writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); + if (message.enforceNamingStyle != null && Object.hasOwnProperty.call(message, "enforceNamingStyle")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.enforceNamingStyle); + if (message.defaultSymbolVisibility != null && Object.hasOwnProperty.call(message, "defaultSymbolVisibility")) + writer.uint32(/* id 8, wireType 0 =*/64).int32(message.defaultSymbolVisibility); return writer; }; @@ -37788,6 +38493,14 @@ message.jsonFormat = reader.int32(); break; } + case 7: { + message.enforceNamingStyle = reader.int32(); + break; + } + case 8: { + message.defaultSymbolVisibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -37878,6 +38591,26 @@ case 2: break; } + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + switch (message.enforceNamingStyle) { + 
default: + return "enforceNamingStyle: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + switch (message.defaultSymbolVisibility) { + default: + return "defaultSymbolVisibility: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + break; + } return null; }; @@ -38017,6 +38750,54 @@ message.jsonFormat = 2; break; } + switch (object.enforceNamingStyle) { + default: + if (typeof object.enforceNamingStyle === "number") { + message.enforceNamingStyle = object.enforceNamingStyle; + break; + } + break; + case "ENFORCE_NAMING_STYLE_UNKNOWN": + case 0: + message.enforceNamingStyle = 0; + break; + case "STYLE2024": + case 1: + message.enforceNamingStyle = 1; + break; + case "STYLE_LEGACY": + case 2: + message.enforceNamingStyle = 2; + break; + } + switch (object.defaultSymbolVisibility) { + default: + if (typeof object.defaultSymbolVisibility === "number") { + message.defaultSymbolVisibility = object.defaultSymbolVisibility; + break; + } + break; + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + case 0: + message.defaultSymbolVisibility = 0; + break; + case "EXPORT_ALL": + case 1: + message.defaultSymbolVisibility = 1; + break; + case "EXPORT_TOP_LEVEL": + case 2: + message.defaultSymbolVisibility = 2; + break; + case "LOCAL_ALL": + case 3: + message.defaultSymbolVisibility = 3; + break; + case "STRICT": + case 4: + message.defaultSymbolVisibility = 4; + break; + } return message; }; @@ -38040,6 +38821,8 @@ object.utf8Validation = options.enums === String ? "UTF8_VALIDATION_UNKNOWN" : 0; object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; + object.enforceNamingStyle = options.enums === String ? "ENFORCE_NAMING_STYLE_UNKNOWN" : 0; + object.defaultSymbolVisibility = options.enums === String ? 
"DEFAULT_SYMBOL_VISIBILITY_UNKNOWN" : 0; } if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; @@ -38053,6 +38836,10 @@ object.messageEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + object.enforceNamingStyle = options.enums === String ? $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] === undefined ? message.enforceNamingStyle : $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] : message.enforceNamingStyle; + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + object.defaultSymbolVisibility = options.enums === String ? $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] === undefined ? message.defaultSymbolVisibility : $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] : message.defaultSymbolVisibility; return object; }; @@ -38132,52 +38919,265 @@ return values; })(); - /** - * Utf8Validation enum. 
- * @name google.protobuf.FeatureSet.Utf8Validation - * @enum {number} - * @property {number} UTF8_VALIDATION_UNKNOWN=0 UTF8_VALIDATION_UNKNOWN value - * @property {number} VERIFY=2 VERIFY value - * @property {number} NONE=3 NONE value - */ - FeatureSet.Utf8Validation = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "UTF8_VALIDATION_UNKNOWN"] = 0; - values[valuesById[2] = "VERIFY"] = 2; - values[valuesById[3] = "NONE"] = 3; - return values; - })(); + /** + * Utf8Validation enum. + * @name google.protobuf.FeatureSet.Utf8Validation + * @enum {number} + * @property {number} UTF8_VALIDATION_UNKNOWN=0 UTF8_VALIDATION_UNKNOWN value + * @property {number} VERIFY=2 VERIFY value + * @property {number} NONE=3 NONE value + */ + FeatureSet.Utf8Validation = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "UTF8_VALIDATION_UNKNOWN"] = 0; + values[valuesById[2] = "VERIFY"] = 2; + values[valuesById[3] = "NONE"] = 3; + return values; + })(); + + /** + * MessageEncoding enum. + * @name google.protobuf.FeatureSet.MessageEncoding + * @enum {number} + * @property {number} MESSAGE_ENCODING_UNKNOWN=0 MESSAGE_ENCODING_UNKNOWN value + * @property {number} LENGTH_PREFIXED=1 LENGTH_PREFIXED value + * @property {number} DELIMITED=2 DELIMITED value + */ + FeatureSet.MessageEncoding = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MESSAGE_ENCODING_UNKNOWN"] = 0; + values[valuesById[1] = "LENGTH_PREFIXED"] = 1; + values[valuesById[2] = "DELIMITED"] = 2; + return values; + })(); + + /** + * JsonFormat enum. 
+ * @name google.protobuf.FeatureSet.JsonFormat + * @enum {number} + * @property {number} JSON_FORMAT_UNKNOWN=0 JSON_FORMAT_UNKNOWN value + * @property {number} ALLOW=1 ALLOW value + * @property {number} LEGACY_BEST_EFFORT=2 LEGACY_BEST_EFFORT value + */ + FeatureSet.JsonFormat = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "JSON_FORMAT_UNKNOWN"] = 0; + values[valuesById[1] = "ALLOW"] = 1; + values[valuesById[2] = "LEGACY_BEST_EFFORT"] = 2; + return values; + })(); + + /** + * EnforceNamingStyle enum. + * @name google.protobuf.FeatureSet.EnforceNamingStyle + * @enum {number} + * @property {number} ENFORCE_NAMING_STYLE_UNKNOWN=0 ENFORCE_NAMING_STYLE_UNKNOWN value + * @property {number} STYLE2024=1 STYLE2024 value + * @property {number} STYLE_LEGACY=2 STYLE_LEGACY value + */ + FeatureSet.EnforceNamingStyle = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ENFORCE_NAMING_STYLE_UNKNOWN"] = 0; + values[valuesById[1] = "STYLE2024"] = 1; + values[valuesById[2] = "STYLE_LEGACY"] = 2; + return values; + })(); + + FeatureSet.VisibilityFeature = (function() { + + /** + * Properties of a VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @interface IVisibilityFeature + */ + + /** + * Constructs a new VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @classdesc Represents a VisibilityFeature. + * @implements IVisibilityFeature + * @constructor + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + */ + function VisibilityFeature(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new VisibilityFeature instance using the specified properties. 
+ * @function create + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature instance + */ + VisibilityFeature.create = function create(properties) { + return new VisibilityFeature(properties); + }; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet.VisibilityFeature(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a VisibilityFeature message. 
+ * @function verify + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + VisibilityFeature.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + */ + VisibilityFeature.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSet.VisibilityFeature) + return object; + return new $root.google.protobuf.FeatureSet.VisibilityFeature(); + }; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.VisibilityFeature} message VisibilityFeature + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + VisibilityFeature.toObject = function toObject() { + return {}; + }; + + /** + * Converts this VisibilityFeature to JSON. 
+ * @function toJSON + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @instance + * @returns {Object.} JSON object + */ + VisibilityFeature.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for VisibilityFeature + * @function getTypeUrl + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + VisibilityFeature.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSet.VisibilityFeature"; + }; - /** - * MessageEncoding enum. - * @name google.protobuf.FeatureSet.MessageEncoding - * @enum {number} - * @property {number} MESSAGE_ENCODING_UNKNOWN=0 MESSAGE_ENCODING_UNKNOWN value - * @property {number} LENGTH_PREFIXED=1 LENGTH_PREFIXED value - * @property {number} DELIMITED=2 DELIMITED value - */ - FeatureSet.MessageEncoding = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "MESSAGE_ENCODING_UNKNOWN"] = 0; - values[valuesById[1] = "LENGTH_PREFIXED"] = 1; - values[valuesById[2] = "DELIMITED"] = 2; - return values; - })(); + /** + * DefaultSymbolVisibility enum. 
+ * @name google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility + * @enum {number} + * @property {number} DEFAULT_SYMBOL_VISIBILITY_UNKNOWN=0 DEFAULT_SYMBOL_VISIBILITY_UNKNOWN value + * @property {number} EXPORT_ALL=1 EXPORT_ALL value + * @property {number} EXPORT_TOP_LEVEL=2 EXPORT_TOP_LEVEL value + * @property {number} LOCAL_ALL=3 LOCAL_ALL value + * @property {number} STRICT=4 STRICT value + */ + VisibilityFeature.DefaultSymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"] = 0; + values[valuesById[1] = "EXPORT_ALL"] = 1; + values[valuesById[2] = "EXPORT_TOP_LEVEL"] = 2; + values[valuesById[3] = "LOCAL_ALL"] = 3; + values[valuesById[4] = "STRICT"] = 4; + return values; + })(); - /** - * JsonFormat enum. - * @name google.protobuf.FeatureSet.JsonFormat - * @enum {number} - * @property {number} JSON_FORMAT_UNKNOWN=0 JSON_FORMAT_UNKNOWN value - * @property {number} ALLOW=1 ALLOW value - * @property {number} LEGACY_BEST_EFFORT=2 LEGACY_BEST_EFFORT value - */ - FeatureSet.JsonFormat = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "JSON_FORMAT_UNKNOWN"] = 0; - values[valuesById[1] = "ALLOW"] = 1; - values[valuesById[2] = "LEGACY_BEST_EFFORT"] = 2; - return values; + return VisibilityFeature; })(); return FeatureSet; @@ -38364,6 +39364,7 @@ default: return "minimumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -38381,6 +39382,7 @@ default: return "maximumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -38429,6 +39431,10 @@ case 0: message.minimumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.minimumEdition = 900; + break; case "EDITION_PROTO2": case 998: message.minimumEdition = 998; @@ -38481,6 +39487,10 @@ case 0: message.maximumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.maximumEdition = 
900; + break; case "EDITION_PROTO2": case 998: message.maximumEdition = 998; @@ -38589,7 +39599,8 @@ * @memberof google.protobuf.FeatureSetDefaults * @interface IFeatureSetEditionDefault * @property {google.protobuf.Edition|null} [edition] FeatureSetEditionDefault edition - * @property {google.protobuf.IFeatureSet|null} [features] FeatureSetEditionDefault features + * @property {google.protobuf.IFeatureSet|null} [overridableFeatures] FeatureSetEditionDefault overridableFeatures + * @property {google.protobuf.IFeatureSet|null} [fixedFeatures] FeatureSetEditionDefault fixedFeatures */ /** @@ -38616,12 +39627,20 @@ FeatureSetEditionDefault.prototype.edition = 0; /** - * FeatureSetEditionDefault features. - * @member {google.protobuf.IFeatureSet|null|undefined} features + * FeatureSetEditionDefault overridableFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} overridableFeatures + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + */ + FeatureSetEditionDefault.prototype.overridableFeatures = null; + + /** + * FeatureSetEditionDefault fixedFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} fixedFeatures * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault * @instance */ - FeatureSetEditionDefault.prototype.features = null; + FeatureSetEditionDefault.prototype.fixedFeatures = null; /** * Creates a new FeatureSetEditionDefault instance using the specified properties. 
@@ -38647,10 +39666,12 @@ FeatureSetEditionDefault.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.features != null && Object.hasOwnProperty.call(message, "features")) - $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); + if (message.overridableFeatures != null && Object.hasOwnProperty.call(message, "overridableFeatures")) + $root.google.protobuf.FeatureSet.encode(message.overridableFeatures, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.fixedFeatures != null && Object.hasOwnProperty.call(message, "fixedFeatures")) + $root.google.protobuf.FeatureSet.encode(message.fixedFeatures, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; @@ -38691,8 +39712,12 @@ message.edition = reader.int32(); break; } - case 2: { - message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + case 4: { + message.overridableFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 5: { + message.fixedFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } default: @@ -38735,6 +39760,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -38747,10 +39773,15 @@ case 2147483647: break; } - if (message.features != null && message.hasOwnProperty("features")) { - var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.overridableFeatures); + if (error) + return "overridableFeatures." 
+ error; + } + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.fixedFeatures); if (error) - return "features." + error; + return "fixedFeatures." + error; } return null; }; @@ -38778,6 +39809,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -38819,10 +39854,15 @@ message.edition = 2147483647; break; } - if (object.features != null) { - if (typeof object.features !== "object") - throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features: object expected"); - message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + if (object.overridableFeatures != null) { + if (typeof object.overridableFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridableFeatures: object expected"); + message.overridableFeatures = $root.google.protobuf.FeatureSet.fromObject(object.overridableFeatures); + } + if (object.fixedFeatures != null) { + if (typeof object.fixedFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixedFeatures: object expected"); + message.fixedFeatures = $root.google.protobuf.FeatureSet.fromObject(object.fixedFeatures); } return message; }; @@ -38841,13 +39881,16 @@ options = {}; var object = {}; if (options.defaults) { - object.features = null; object.edition = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.overridableFeatures = null; + object.fixedFeatures = null; } - if (message.features != null && message.hasOwnProperty("features")) - object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? 
$root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) + object.overridableFeatures = $root.google.protobuf.FeatureSet.toObject(message.overridableFeatures, options); + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) + object.fixedFeatures = $root.google.protobuf.FeatureSet.toObject(message.fixedFeatures, options); return object; }; @@ -40062,6 +41105,22 @@ return GeneratedCodeInfo; })(); + /** + * SymbolVisibility enum. + * @name google.protobuf.SymbolVisibility + * @enum {number} + * @property {number} VISIBILITY_UNSET=0 VISIBILITY_UNSET value + * @property {number} VISIBILITY_LOCAL=1 VISIBILITY_LOCAL value + * @property {number} VISIBILITY_EXPORT=2 VISIBILITY_EXPORT value + */ + protobuf.SymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "VISIBILITY_UNSET"] = 0; + values[valuesById[1] = "VISIBILITY_LOCAL"] = 1; + values[valuesById[2] = "VISIBILITY_EXPORT"] = 2; + return values; + })(); + protobuf.Duration = (function() { /** @@ -44055,6 +45114,7 @@ * @interface ICommonLanguageSettings * @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri * @property {Array.|null} [destinations] CommonLanguageSettings destinations + * @property {google.api.ISelectiveGapicGeneration|null} [selectiveGapicGeneration] CommonLanguageSettings selectiveGapicGeneration */ /** @@ -44089,6 +45149,14 @@ */ CommonLanguageSettings.prototype.destinations = $util.emptyArray; + /** + * CommonLanguageSettings selectiveGapicGeneration. 
+ * @member {google.api.ISelectiveGapicGeneration|null|undefined} selectiveGapicGeneration + * @memberof google.api.CommonLanguageSettings + * @instance + */ + CommonLanguageSettings.prototype.selectiveGapicGeneration = null; + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @function create @@ -44121,6 +45189,8 @@ writer.int32(message.destinations[i]); writer.ldelim(); } + if (message.selectiveGapicGeneration != null && Object.hasOwnProperty.call(message, "selectiveGapicGeneration")) + $root.google.api.SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -44172,6 +45242,10 @@ message.destinations.push(reader.int32()); break; } + case 3: { + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -44223,6 +45297,11 @@ break; } } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) { + var error = $root.google.api.SelectiveGapicGeneration.verify(message.selectiveGapicGeneration); + if (error) + return "selectiveGapicGeneration." 
+ error; + } return null; }; @@ -44265,6 +45344,11 @@ break; } } + if (object.selectiveGapicGeneration != null) { + if (typeof object.selectiveGapicGeneration !== "object") + throw TypeError(".google.api.CommonLanguageSettings.selectiveGapicGeneration: object expected"); + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.fromObject(object.selectiveGapicGeneration); + } return message; }; @@ -44283,8 +45367,10 @@ var object = {}; if (options.arrays || options.defaults) object.destinations = []; - if (options.defaults) + if (options.defaults) { object.referenceDocsUri = ""; + object.selectiveGapicGeneration = null; + } if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) object.referenceDocsUri = message.referenceDocsUri; if (message.destinations && message.destinations.length) { @@ -44292,6 +45378,8 @@ for (var j = 0; j < message.destinations.length; ++j) object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j]; } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) + object.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.toObject(message.selectiveGapicGeneration, options); return object; }; @@ -46114,6 +47202,7 @@ * @memberof google.api * @interface IPythonSettings * @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common + * @property {google.api.PythonSettings.IExperimentalFeatures|null} [experimentalFeatures] PythonSettings experimentalFeatures */ /** @@ -46139,6 +47228,14 @@ */ PythonSettings.prototype.common = null; + /** + * PythonSettings experimentalFeatures. 
+ * @member {google.api.PythonSettings.IExperimentalFeatures|null|undefined} experimentalFeatures + * @memberof google.api.PythonSettings + * @instance + */ + PythonSettings.prototype.experimentalFeatures = null; + /** * Creates a new PythonSettings instance using the specified properties. * @function create @@ -46165,6 +47262,8 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.experimentalFeatures != null && Object.hasOwnProperty.call(message, "experimentalFeatures")) + $root.google.api.PythonSettings.ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -46205,6 +47304,10 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -46245,6 +47348,11 @@ if (error) return "common." + error; } + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) { + var error = $root.google.api.PythonSettings.ExperimentalFeatures.verify(message.experimentalFeatures); + if (error) + return "experimentalFeatures." 
+ error; + } return null; }; @@ -46265,6 +47373,11 @@ throw TypeError(".google.api.PythonSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.experimentalFeatures != null) { + if (typeof object.experimentalFeatures !== "object") + throw TypeError(".google.api.PythonSettings.experimentalFeatures: object expected"); + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.fromObject(object.experimentalFeatures); + } return message; }; @@ -46281,38 +47394,294 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.common = null; + object.experimentalFeatures = null; + } if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) + object.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.toObject(message.experimentalFeatures, options); return object; }; - /** - * Converts this PythonSettings to JSON. - * @function toJSON - * @memberof google.api.PythonSettings - * @instance - * @returns {Object.} JSON object - */ - PythonSettings.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; + /** + * Converts this PythonSettings to JSON. 
+ * @function toJSON + * @memberof google.api.PythonSettings + * @instance + * @returns {Object.} JSON object + */ + PythonSettings.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PythonSettings + * @function getTypeUrl + * @memberof google.api.PythonSettings + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PythonSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PythonSettings"; + }; + + PythonSettings.ExperimentalFeatures = (function() { + + /** + * Properties of an ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @interface IExperimentalFeatures + * @property {boolean|null} [restAsyncIoEnabled] ExperimentalFeatures restAsyncIoEnabled + * @property {boolean|null} [protobufPythonicTypesEnabled] ExperimentalFeatures protobufPythonicTypesEnabled + * @property {boolean|null} [unversionedPackageDisabled] ExperimentalFeatures unversionedPackageDisabled + */ + + /** + * Constructs a new ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @classdesc Represents an ExperimentalFeatures. + * @implements IExperimentalFeatures + * @constructor + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + */ + function ExperimentalFeatures(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExperimentalFeatures restAsyncIoEnabled. 
+ * @member {boolean} restAsyncIoEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.restAsyncIoEnabled = false; + + /** + * ExperimentalFeatures protobufPythonicTypesEnabled. + * @member {boolean} protobufPythonicTypesEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.protobufPythonicTypesEnabled = false; + + /** + * ExperimentalFeatures unversionedPackageDisabled. + * @member {boolean} unversionedPackageDisabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.unversionedPackageDisabled = false; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @function create + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures instance + */ + ExperimentalFeatures.create = function create(properties) { + return new ExperimentalFeatures(properties); + }; + + /** + * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. 
+ * @function encode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.restAsyncIoEnabled != null && Object.hasOwnProperty.call(message, "restAsyncIoEnabled")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.restAsyncIoEnabled); + if (message.protobufPythonicTypesEnabled != null && Object.hasOwnProperty.call(message, "protobufPythonicTypesEnabled")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.protobufPythonicTypesEnabled); + if (message.unversionedPackageDisabled != null && Object.hasOwnProperty.call(message, "unversionedPackageDisabled")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.unversionedPackageDisabled); + return writer; + }; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.restAsyncIoEnabled = reader.bool(); + break; + } + case 2: { + message.protobufPythonicTypesEnabled = reader.bool(); + break; + } + case 3: { + message.unversionedPackageDisabled = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExperimentalFeatures message. 
+ * @function verify + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExperimentalFeatures.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + if (typeof message.restAsyncIoEnabled !== "boolean") + return "restAsyncIoEnabled: boolean expected"; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + if (typeof message.protobufPythonicTypesEnabled !== "boolean") + return "protobufPythonicTypesEnabled: boolean expected"; + if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) + if (typeof message.unversionedPackageDisabled !== "boolean") + return "unversionedPackageDisabled: boolean expected"; + return null; + }; + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} object Plain object + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + */ + ExperimentalFeatures.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.PythonSettings.ExperimentalFeatures) + return object; + var message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + if (object.restAsyncIoEnabled != null) + message.restAsyncIoEnabled = Boolean(object.restAsyncIoEnabled); + if (object.protobufPythonicTypesEnabled != null) + message.protobufPythonicTypesEnabled = Boolean(object.protobufPythonicTypesEnabled); + if (object.unversionedPackageDisabled != null) + message.unversionedPackageDisabled = Boolean(object.unversionedPackageDisabled); + return message; + }; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.ExperimentalFeatures} message ExperimentalFeatures + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExperimentalFeatures.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.restAsyncIoEnabled = false; + object.protobufPythonicTypesEnabled = false; + object.unversionedPackageDisabled = false; + } + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + object.restAsyncIoEnabled = message.restAsyncIoEnabled; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + object.protobufPythonicTypesEnabled = message.protobufPythonicTypesEnabled; + if (message.unversionedPackageDisabled != null && 
message.hasOwnProperty("unversionedPackageDisabled")) + object.unversionedPackageDisabled = message.unversionedPackageDisabled; + return object; + }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @function toJSON + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + * @returns {Object.} JSON object + */ + ExperimentalFeatures.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; - /** - * Gets the default type url for PythonSettings - * @function getTypeUrl - * @memberof google.api.PythonSettings - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - PythonSettings.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.PythonSettings"; - }; + /** + * Gets the default type url for ExperimentalFeatures + * @function getTypeUrl + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExperimentalFeatures.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PythonSettings.ExperimentalFeatures"; + }; + + return ExperimentalFeatures; + })(); return PythonSettings; })(); @@ -47190,6 +48559,7 @@ * @memberof google.api * @interface IGoSettings * @property {google.api.ICommonLanguageSettings|null} [common] GoSettings common + * @property {Object.|null} [renamedServices] GoSettings renamedServices */ /** @@ -47201,6 +48571,7 @@ * @param {google.api.IGoSettings=} [properties] Properties to set */ function GoSettings(properties) { + this.renamedServices = {}; if (properties) for (var keys = 
Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -47215,6 +48586,14 @@ */ GoSettings.prototype.common = null; + /** + * GoSettings renamedServices. + * @member {Object.} renamedServices + * @memberof google.api.GoSettings + * @instance + */ + GoSettings.prototype.renamedServices = $util.emptyObject; + /** * Creates a new GoSettings instance using the specified properties. * @function create @@ -47241,6 +48620,9 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.renamedServices != null && Object.hasOwnProperty.call(message, "renamedServices")) + for (var keys = Object.keys(message.renamedServices), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedServices[keys[i]]).ldelim(); return writer; }; @@ -47271,7 +48653,7 @@ GoSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.api.GoSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -47281,6 +48663,29 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + if (message.renamedServices === $util.emptyObject) + message.renamedServices = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.renamedServices[key] = value; + break; + } default: reader.skipType(tag & 7); break; @@ -47321,6 +48726,14 @@ if (error) return "common." + error; } + if (message.renamedServices != null && message.hasOwnProperty("renamedServices")) { + if (!$util.isObject(message.renamedServices)) + return "renamedServices: object expected"; + var key = Object.keys(message.renamedServices); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.renamedServices[key[i]])) + return "renamedServices: string{k:string} expected"; + } return null; }; @@ -47341,6 +48754,13 @@ throw TypeError(".google.api.GoSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.renamedServices) { + if (typeof object.renamedServices !== "object") + throw TypeError(".google.api.GoSettings.renamedServices: object expected"); + message.renamedServices = {}; + for (var keys = Object.keys(object.renamedServices), i = 0; i < keys.length; ++i) + message.renamedServices[keys[i]] = String(object.renamedServices[keys[i]]); + } return message; }; @@ -47357,10 +48777,18 @@ if (!options) options = {}; var object = {}; + if (options.objects || options.defaults) + object.renamedServices = {}; if (options.defaults) object.common = null; if 
(message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + var keys2; + if (message.renamedServices && (keys2 = Object.keys(message.renamedServices)).length) { + object.renamedServices = {}; + for (var j = 0; j < keys2.length; ++j) + object.renamedServices[keys2[j]] = message.renamedServices[keys2[j]]; + } return object; }; @@ -47999,6 +49427,251 @@ return values; })(); + api.SelectiveGapicGeneration = (function() { + + /** + * Properties of a SelectiveGapicGeneration. + * @memberof google.api + * @interface ISelectiveGapicGeneration + * @property {Array.|null} [methods] SelectiveGapicGeneration methods + * @property {boolean|null} [generateOmittedAsInternal] SelectiveGapicGeneration generateOmittedAsInternal + */ + + /** + * Constructs a new SelectiveGapicGeneration. + * @memberof google.api + * @classdesc Represents a SelectiveGapicGeneration. + * @implements ISelectiveGapicGeneration + * @constructor + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + */ + function SelectiveGapicGeneration(properties) { + this.methods = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SelectiveGapicGeneration methods. + * @member {Array.} methods + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.methods = $util.emptyArray; + + /** + * SelectiveGapicGeneration generateOmittedAsInternal. + * @member {boolean} generateOmittedAsInternal + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.generateOmittedAsInternal = false; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. 
+ * @function create + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration instance + */ + SelectiveGapicGeneration.create = function create(properties) { + return new SelectiveGapicGeneration(properties); + }; + + /** + * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @function encode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.methods != null && message.methods.length) + for (var i = 0; i < message.methods.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.methods[i]); + if (message.generateOmittedAsInternal != null && Object.hasOwnProperty.call(message, "generateOmittedAsInternal")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.generateOmittedAsInternal); + return writer; + }; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. + * @function decode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.SelectiveGapicGeneration(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.methods && message.methods.length)) + message.methods = []; + message.methods.push(reader.string()); + break; + } + case 2: { + message.generateOmittedAsInternal = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SelectiveGapicGeneration message. + * @function verify + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SelectiveGapicGeneration.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.methods != null && message.hasOwnProperty("methods")) { + if (!Array.isArray(message.methods)) + return "methods: array expected"; + for (var i = 0; i < message.methods.length; ++i) + if (!$util.isString(message.methods[i])) + return "methods: string[] expected"; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + if (typeof message.generateOmittedAsInternal !== "boolean") + return "generateOmittedAsInternal: boolean expected"; + return null; + }; + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} object Plain object + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + */ + SelectiveGapicGeneration.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.SelectiveGapicGeneration) + return object; + var message = new $root.google.api.SelectiveGapicGeneration(); + if (object.methods) { + if (!Array.isArray(object.methods)) + throw TypeError(".google.api.SelectiveGapicGeneration.methods: array expected"); + message.methods = []; + for (var i = 0; i < object.methods.length; ++i) + message.methods[i] = String(object.methods[i]); + } + if (object.generateOmittedAsInternal != null) + message.generateOmittedAsInternal = Boolean(object.generateOmittedAsInternal); + return message; + }; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.SelectiveGapicGeneration} message SelectiveGapicGeneration + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SelectiveGapicGeneration.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.methods = []; + if (options.defaults) + object.generateOmittedAsInternal = false; + if (message.methods && message.methods.length) { + object.methods = []; + for (var j = 0; j < message.methods.length; ++j) + object.methods[j] = message.methods[j]; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + object.generateOmittedAsInternal = message.generateOmittedAsInternal; + return object; + }; + + /** + * Converts this SelectiveGapicGeneration to JSON. 
+ * @function toJSON + * @memberof google.api.SelectiveGapicGeneration + * @instance + * @returns {Object.} JSON object + */ + SelectiveGapicGeneration.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @function getTypeUrl + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SelectiveGapicGeneration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.SelectiveGapicGeneration"; + }; + + return SelectiveGapicGeneration; + })(); + /** * LaunchStage enum. * @name google.api.LaunchStage diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 5568792a1ba..0307c1da4e2 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -2723,12 +2723,19 @@ "type": "FileDescriptorProto", "id": 1 } - } + }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ] }, "Edition": { "edition": "proto2", "values": { "EDITION_UNKNOWN": 0, + "EDITION_LEGACY": 900, "EDITION_PROTO2": 998, "EDITION_PROTO3": 999, "EDITION_2023": 1000, @@ -2767,6 +2774,11 @@ "type": "int32", "id": 11 }, + "optionDependency": { + "rule": "repeated", + "type": "string", + "id": 15 + }, "messageType": { "rule": "repeated", "type": "DescriptorProto", @@ -2855,6 +2867,10 @@ "rule": "repeated", "type": "string", "id": 10 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 11 } }, "nested": { @@ -3080,6 +3096,10 @@ "rule": "repeated", "type": "string", "id": 5 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 6 } }, "nested": { @@ -3294,6 +3314,7 @@ 42, 42 ], + "php_generic_services", 
[ 38, 38 @@ -3429,7 +3450,8 @@ "type": "bool", "id": 10, "options": { - "default": false + "default": false, + "deprecated": true } }, "debugRedact": { @@ -3457,6 +3479,10 @@ "type": "FeatureSet", "id": 21 }, + "featureSupport": { + "type": "FeatureSupport", + "id": 22 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -3526,6 +3552,26 @@ "id": 2 } } + }, + "FeatureSupport": { + "fields": { + "editionIntroduced": { + "type": "Edition", + "id": 1 + }, + "editionDeprecated": { + "type": "Edition", + "id": 2 + }, + "deprecationWarning": { + "type": "string", + "id": 3 + }, + "editionRemoved": { + "type": "Edition", + "id": 4 + } + } } } }, @@ -3614,6 +3660,10 @@ "default": false } }, + "featureSupport": { + "type": "FieldOptions.FeatureSupport", + "id": 4 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -3756,6 +3806,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_2023", "edition_defaults.value": "EXPLICIT" } @@ -3766,6 +3817,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "OPEN" } @@ -3776,6 +3828,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "PACKED" } @@ -3786,6 +3839,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "VERIFY" } @@ -3796,7 +3850,8 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "EDITION_PROTO2", + 
"feature_support.edition_introduced": "EDITION_2023", + "edition_defaults.edition": "EDITION_LEGACY", "edition_defaults.value": "LENGTH_PREFIXED" } }, @@ -3806,27 +3861,38 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "ALLOW" } + }, + "enforceNamingStyle": { + "type": "EnforceNamingStyle", + "id": 7, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_METHOD", + "feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "STYLE2024" + } + }, + "defaultSymbolVisibility": { + "type": "VisibilityFeature.DefaultSymbolVisibility", + "id": 8, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "EXPORT_TOP_LEVEL" + } } }, "extensions": [ [ 1000, - 1000 - ], - [ - 1001, - 1001 - ], - [ - 1002, - 1002 - ], - [ - 9990, - 9990 + 9994 ], [ 9995, @@ -3871,7 +3937,13 @@ "UTF8_VALIDATION_UNKNOWN": 0, "VERIFY": 2, "NONE": 3 - } + }, + "reserved": [ + [ + 1, + 1 + ] + ] }, "MessageEncoding": { "values": { @@ -3886,6 +3958,33 @@ "ALLOW": 1, "LEGACY_BEST_EFFORT": 2 } + }, + "EnforceNamingStyle": { + "values": { + "ENFORCE_NAMING_STYLE_UNKNOWN": 0, + "STYLE2024": 1, + "STYLE_LEGACY": 2 + } + }, + "VisibilityFeature": { + "fields": {}, + "reserved": [ + [ + 1, + 536870911 + ] + ], + "nested": { + "DefaultSymbolVisibility": { + "values": { + "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": 0, + "EXPORT_ALL": 1, + "EXPORT_TOP_LEVEL": 2, + "LOCAL_ALL": 3, + "STRICT": 4 + } + } + } } } }, @@ -3913,11 +4012,26 @@ "type": "Edition", "id": 3 }, - "features": { + "overridableFeatures": { "type": "FeatureSet", - "id": 2 + "id": 4 + }, + "fixedFeatures": { + "type": "FeatureSet", + "id": 5 } - } + 
}, + "reserved": [ + [ + 1, + 1 + ], + [ + 2, + 2 + ], + "features" + ] } } }, @@ -3930,6 +4044,12 @@ "id": 1 } }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ], "nested": { "Location": { "fields": { @@ -4015,6 +4135,14 @@ } } }, + "SymbolVisibility": { + "edition": "proto2", + "values": { + "VISIBILITY_UNSET": 0, + "VISIBILITY_LOCAL": 1, + "VISIBILITY_EXPORT": 2 + } + }, "Duration": { "fields": { "seconds": { @@ -4143,8 +4271,7 @@ "java_multiple_files": true, "java_outer_classname": "ResourceProto", "java_package": "com.google.api", - "objc_class_prefix": "GAPI", - "cc_enable_arenas": true + "objc_class_prefix": "GAPI" }, "nested": { "http": { @@ -4268,6 +4395,10 @@ "rule": "repeated", "type": "ClientLibraryDestination", "id": 2 + }, + "selectiveGapicGeneration": { + "type": "SelectiveGapicGeneration", + "id": 3 } } }, @@ -4408,6 +4539,28 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "experimentalFeatures": { + "type": "ExperimentalFeatures", + "id": 2 + } + }, + "nested": { + "ExperimentalFeatures": { + "fields": { + "restAsyncIoEnabled": { + "type": "bool", + "id": 1 + }, + "protobufPythonicTypesEnabled": { + "type": "bool", + "id": 2 + }, + "unversionedPackageDisabled": { + "type": "bool", + "id": 3 + } + } } } }, @@ -4465,6 +4618,11 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "renamedServices": { + "keyType": "string", + "type": "string", + "id": 2 } } }, @@ -4526,6 +4684,19 @@ "PACKAGE_MANAGER": 20 } }, + "SelectiveGapicGeneration": { + "fields": { + "methods": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "generateOmittedAsInternal": { + "type": "bool", + "id": 2 + } + } + }, "LaunchStage": { "values": { "LAUNCH_STAGE_UNSPECIFIED": 0, From 6536d49ddaa41fdd6531419fbf7cc85b3b184d09 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 10:32:45 -0400 Subject: [PATCH 311/333] chore: Update response.yaml workflow (#589) 
Source-Link: https://github.com/googleapis/synthtool/commit/d47c856822d8952427121905e27e6415b95985e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:1053e41d7e29d0619500416721caffff36357aa3708074ebdfae024b38ef3a40 Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 2a0311b85ab..ddd5ae0a080 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:66c44f0ad8f6caaa4eb3fbe74f8c2b4de5a97c2b930cee069e712c447723ba95 -# created: 2025-07-08T20:57:17.642848562Z + digest: sha256:1053e41d7e29d0619500416721caffff36357aa3708074ebdfae024b38ef3a40 +# created: 2025-08-13T13:35:22.107446359Z From 3d1ffe16456e4765e04178f48a5f6fac1104c7d5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 16:33:12 -0400 Subject: [PATCH 312/333] chore: update ci.yaml template (#590) Source-Link: https://github.com/googleapis/synthtool/commit/d08775b0f7d3d00aa0ed1826e0d7d7b09bad1723 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:1861c5e2c9e12678f64f68c4ef449759f80c64299eb35a5e3c916eca46b0d2c4 Co-authored-by: Owl Bot --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index ddd5ae0a080..4dd3dd250e6 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations 
under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:1053e41d7e29d0619500416721caffff36357aa3708074ebdfae024b38ef3a40 -# created: 2025-08-13T13:35:22.107446359Z + digest: sha256:1861c5e2c9e12678f64f68c4ef449759f80c64299eb35a5e3c916eca46b0d2c4 +# created: 2025-08-13T15:27:11.205698327Z From 3d9c48e10e8660ff9a2d18808f44c59ee0da3253 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 17:26:43 -0400 Subject: [PATCH 313/333] chore: update ci.yaml template node version (#591) Source-Link: https://github.com/googleapis/synthtool/commit/e6da477a0d5b99b118fc1175d2e2fc628a662de1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:312a74489156af12755b58050a7bfaea0c6908698376acc714a589dc7955aba6 Co-authored-by: Owl Bot Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 4dd3dd250e6..5c748c11f76 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:1861c5e2c9e12678f64f68c4ef449759f80c64299eb35a5e3c916eca46b0d2c4 -# created: 2025-08-13T15:27:11.205698327Z + digest: sha256:312a74489156af12755b58050a7bfaea0c6908698376acc714a589dc7955aba6 +# created: 2025-08-13T16:00:29.083454445Z From a3d43d866aefd0569a06cea3b8a877b41fd968a5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 20:41:04 -0400 Subject: [PATCH 314/333] chore: revert previous node-version template changes in ci.yaml (#594) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: revert previous node-version template changes in ci.yaml chore: revert previous node-version template changes Source-Link: https://github.com/googleapis/synthtool/commit/4b09ede35df7a2eb40ce6fb00617b71841c68b04 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:b612d739b0533e56ba174526ca339f264b63e911c30d6f83f55b57c38cc6ad2a * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 5c748c11f76..3037bc547d9 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:312a74489156af12755b58050a7bfaea0c6908698376acc714a589dc7955aba6 -# created: 2025-08-13T16:00:29.083454445Z + digest: sha256:b612d739b0533e56ba174526ca339f264b63e911c30d6f83f55b57c38cc6ad2a +# created: 2025-08-15T12:36:48.871481111Z From 26c187b72aafbc791c0b5325d0da9de53c79ea16 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 10:33:40 -0400 Subject: [PATCH 315/333] chore: revert previous node-version template updates (#593) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: revert previous node-version template updates * chore: update ci.yaml template * chore: update ci.yaml template node version * chore: revert previous node-version template changes Source-Link: https://github.com/googleapis/synthtool/commit/55f9ecb12170497c84d485174b620acd445b0ff8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:1d7c29870723d4d2e32870c6dcdc43d4aa93dcc6519272d025bfcf2ecd48f091 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index 3037bc547d9..be00544b5d7 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:b612d739b0533e56ba174526ca339f264b63e911c30d6f83f55b57c38cc6ad2a -# created: 2025-08-15T12:36:48.871481111Z + digest: sha256:1d7c29870723d4d2e32870c6dcdc43d4aa93dcc6519272d025bfcf2ecd48f091 +# created: 2025-08-14T17:16:30.591542591Z From 22f6d0bdba37d7795bea4fd816253c3badf37e77 Mon Sep 17 00:00:00 2001 From: Alvaro Viebrantz Date: Wed, 20 Aug 2025 11:05:55 -0400 Subject: [PATCH 316/333] ci: catch error when fetching dataset metadata on cleanup (#598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: catch error when fetching dataset metadata on cleanup * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: move MissingValuesInterpretation test inside JSONWriter test group * fix: catch errors for getDatasets call * refactor: move cleanup function to separated file * ci: increase timeout on before func that handles clean up --------- Co-authored-by: Owl Bot --- .../system-test/managed_writer_client_test.ts | 284 ++++++++---------- .../system-test/reader_client_test.ts | 34 +-- .../bigquery-storage/system-test/util.ts | 50 +++ 3 files changed, 181 insertions(+), 187 deletions(-) create mode 100644 handwritten/bigquery-storage/system-test/util.ts diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index dc355b5dfe1..b0ad7682daf 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -28,6 +28,7 @@ import * as customerRecordProtoJson from '../samples/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; import {PreciseDate} from 
'@google-cloud/precise-date'; +import {cleanupDatasets} from './util'; const pkg = JSON.parse( readFileSync(path.resolve(__dirname, '../../package.json'), 'utf-8'), @@ -109,10 +110,10 @@ describe('managedwriter.WriterClient', () => { }; before(async () => { - await deleteDatasets(); + await cleanupDatasets(bigquery, GCLOUD_TESTS_PREFIX); await bigquery.createDataset(datasetId); - }); + }).timeout(2 * 60 * 1000); beforeEach(async () => { tableId = generateUuid(); @@ -1020,146 +1021,146 @@ describe('managedwriter.WriterClient', () => { client.close(); } }); - }); - it('should fill default values when MissingValuesInterpretation is set', async () => { - bqWriteClient.initialize().catch(err => { - throw err; - }); - const client = new WriterClient(); - client.setClient(bqWriteClient); + it('should fill default values when MissingValuesInterpretation is set', async () => { + bqWriteClient.initialize().catch(err => { + throw err; + }); + const client = new WriterClient(); + client.setClient(bqWriteClient); - const schema: TableSchema = { - fields: [ - { - name: 'customer_name', - type: 'STRING', - mode: 'REQUIRED', - }, - { - name: 'row_num', - type: 'INTEGER', - mode: 'REQUIRED', - }, - { - name: 'id', - type: 'STRING', - mode: 'REQUIRED', - defaultValueExpression: 'GENERATE_UUID()', - }, - { - name: 'created_at', - type: 'TIMESTAMP', - defaultValueExpression: 'CURRENT_TIMESTAMP()', - }, - { - name: 'updated_at', - type: 'TIMESTAMP', - defaultValueExpression: 'CURRENT_TIMESTAMP()', - }, - ], - }; - const [table] = await bigquery - .dataset(datasetId) - .createTable(tableId + '_default_values', {schema}); - const parent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; + const schema: TableSchema = { + fields: [ + { + name: 'customer_name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'row_num', + type: 'INTEGER', + mode: 'REQUIRED', + }, + { + name: 'id', + type: 'STRING', + mode: 'REQUIRED', + defaultValueExpression: 'GENERATE_UUID()', + }, 
+ { + name: 'created_at', + type: 'TIMESTAMP', + defaultValueExpression: 'CURRENT_TIMESTAMP()', + }, + { + name: 'updated_at', + type: 'TIMESTAMP', + defaultValueExpression: 'CURRENT_TIMESTAMP()', + }, + ], + }; + const [table] = await bigquery + .dataset(datasetId) + .createTable(tableId + '_default_values', {schema}); + const parent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; - const storageSchema = - adapt.convertBigQuerySchemaToStorageTableSchema(schema); - const protoDescriptor: DescriptorProto = - adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor: DescriptorProto = + adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); - const row1 = { - customer_name: 'Ada Lovelace', - row_num: 1, - }; + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + }; - const row2 = { - customer_name: 'Alan Turing', - row_num: 2, - }; + const row2 = { + customer_name: 'Alan Turing', + row_num: 2, + }; - try { - const connection = await client.createStreamConnection({ - streamType: managedwriter.PendingStream, - destinationTable: parent, - }); + try { + const connection = await client.createStreamConnection({ + streamType: managedwriter.PendingStream, + destinationTable: parent, + }); - const streamId = connection.getStreamId(); - const writer = new JSONWriter({ - connection, - protoDescriptor, - defaultMissingValueInterpretation: 'DEFAULT_VALUE', - missingValueInterpretations: { - updated_at: 'NULL_VALUE', - }, - }); + const streamId = connection.getStreamId(); + const writer = new JSONWriter({ + connection, + protoDescriptor, + defaultMissingValueInterpretation: 'DEFAULT_VALUE', + missingValueInterpretations: { + updated_at: 'NULL_VALUE', + }, + }); - let pw = writer.appendRows([row1, row2], 0); - let result = await pw.getResult(); + let pw = writer.appendRows([row1, row2], 0); + let result = await 
pw.getResult(); - // change MVI config - writer.setDefaultMissingValueInterpretation('NULL_VALUE'); - writer.setMissingValueInterpretations({ - id: 'DEFAULT_VALUE', - updated_at: 'DEFAULT_VALUE', - }); + // change MVI config + writer.setDefaultMissingValueInterpretation('NULL_VALUE'); + writer.setMissingValueInterpretations({ + id: 'DEFAULT_VALUE', + updated_at: 'DEFAULT_VALUE', + }); - const row3 = { - customer_name: 'Charles Babbage', - row_num: 3, - }; + const row3 = { + customer_name: 'Charles Babbage', + row_num: 3, + }; - const row4 = { - customer_name: 'Lord Byron', - row_num: 4, - }; + const row4 = { + customer_name: 'Lord Byron', + row_num: 4, + }; - pw = writer.appendRows([row3, row4], 2); - result = await pw.getResult(); + pw = writer.appendRows([row3, row4], 2); + result = await pw.getResult(); - assert.equal(result.error, null); + assert.equal(result.error, null); - const res = await connection.finalize(); - connection.close(); - assert.equal(res?.rowCount, 4); + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 4); - const commitResponse = await client.batchCommitWriteStream({ - parent, - writeStreams: [streamId], - }); - assert.equal(commitResponse.streamErrors?.length, 0); - - const [rows] = await bigquery.query( - `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by row_num`, - ); - assert.strictEqual(rows.length, 4); - - const first = rows[0]; - assert.notEqual(first.id, ''); - assert.notEqual(first.created_at, null); - assert.equal(first.updated_at, null); - - const second = rows[1]; - assert.notEqual(second.id, ''); - assert.notEqual(second.created_at, null); - assert.equal(second.updated_at, null); - - // After change on MVI config - const third = rows[2]; - assert.notEqual(third.id, ''); - assert.equal(third.created_at, null); - assert.notEqual(third.updated_at, null); - - const forth = rows[3]; - assert.notEqual(forth.id, ''); - assert.equal(forth.created_at, null); - 
assert.notEqual(forth.updated_at, null); - - writer.close(); - } finally { - client.close(); - } + const commitResponse = await client.batchCommitWriteStream({ + parent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + + const [rows] = await bigquery.query( + `SELECT * FROM \`${projectId}.${datasetId}.${table.id}\` order by row_num`, + ); + assert.strictEqual(rows.length, 4); + + const first = rows[0]; + assert.notEqual(first.id, ''); + assert.notEqual(first.created_at, null); + assert.equal(first.updated_at, null); + + const second = rows[1]; + assert.notEqual(second.id, ''); + assert.notEqual(second.created_at, null); + assert.equal(second.updated_at, null); + + // After change on MVI config + const third = rows[2]; + assert.notEqual(third.id, ''); + assert.equal(third.created_at, null); + assert.notEqual(third.updated_at, null); + + const forth = rows[3]; + assert.notEqual(forth.id, ''); + assert.equal(forth.created_at, null); + assert.notEqual(forth.updated_at, null); + + writer.close(); + } finally { + client.close(); + } + }); }); describe('Flaky Scenarios', () => { @@ -1925,33 +1926,4 @@ describe('managedwriter.WriterClient', () => { } }); }); - - // Only delete a resource if it is older than 24 hours. That will prevent - // collisions with parallel CI test runs. 
- function isResourceStale(creationTime: number) { - const oneDayMs = 86400000; - const now = new Date(); - const created = new Date(creationTime); - return now.getTime() - created.getTime() >= oneDayMs; - } - - async function deleteDatasets() { - let [datasets] = await bigquery.getDatasets(); - datasets = datasets.filter(dataset => - dataset.id?.includes(GCLOUD_TESTS_PREFIX), - ); - - for (const dataset of datasets) { - const [metadata] = await dataset.getMetadata(); - const creationTime = Number(metadata.creationTime); - if (isResourceStale(creationTime)) { - try { - await dataset.delete({force: true}); - } catch (e) { - console.log(`dataset(${dataset.id}).delete() failed`); - console.log(e); - } - } - } - } }); diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts index c8e242b033f..0363f34789a 100644 --- a/handwritten/bigquery-storage/system-test/reader_client_test.ts +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -24,6 +24,7 @@ import {ClientOptions} from 'google-gax'; import * as customerRecordProtoJson from '../samples/customer_record.json'; import * as bigquerystorage from '../src'; import * as reader from '../src/reader'; +import {cleanupDatasets} from './util'; import {RecordBatch, Table, tableFromIPC} from 'apache-arrow'; type ReadRowsResponse = @@ -103,10 +104,10 @@ describe('reader.ReaderClient', () => { }; before(async () => { - await deleteDatasets(); + await cleanupDatasets(bigquery, GCLOUD_TESTS_PREFIX); await bigquery.createDataset(datasetId); - }); + }).timeout(2 * 60 * 1000); beforeEach(async () => { tableId = generateUuid(); @@ -687,33 +688,4 @@ describe('reader.ReaderClient', () => { } }); }); - - // Only delete a resource if it is older than 24 hours. That will prevent - // collisions with parallel CI test runs. 
- function isResourceStale(creationTime: number) { - const oneDayMs = 86400000; - const now = new Date(); - const created = new Date(creationTime); - return now.getTime() - created.getTime() >= oneDayMs; - } - - async function deleteDatasets() { - let [datasets] = await bigquery.getDatasets(); - datasets = datasets.filter(dataset => - dataset.id?.includes(GCLOUD_TESTS_PREFIX), - ); - - for (const dataset of datasets) { - const [metadata] = await dataset.getMetadata(); - const creationTime = Number(metadata.creationTime); - if (isResourceStale(creationTime)) { - try { - await dataset.delete({force: true}); - } catch (e) { - console.log(`dataset(${dataset.id}).delete() failed`); - console.log(e); - } - } - } - } }); diff --git a/handwritten/bigquery-storage/system-test/util.ts b/handwritten/bigquery-storage/system-test/util.ts new file mode 100644 index 00000000000..952cdc05cc5 --- /dev/null +++ b/handwritten/bigquery-storage/system-test/util.ts @@ -0,0 +1,50 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {BigQuery} from '@google-cloud/bigquery'; + +// Only delete a resource if it is older than 4 hours. That will prevent +// collisions with parallel CI test runs. 
+function isResourceStale(creationTime: number) { + const windowMs = 4 * 60 * 60 * 1000; + const now = new Date(); + const created = new Date(creationTime); + return now.getTime() - created.getTime() >= windowMs; +} + +export async function cleanupDatasets(client: BigQuery, datasetPrefix: string) { + let [datasets] = await client.getDatasets(); + datasets = datasets.filter(dataset => dataset.id?.includes(datasetPrefix)); + + for (const dataset of datasets) { + let isDatasetStable = false; + try { + const [metadata] = await dataset.getMetadata(); + const creationTime = Number(metadata.creationTime); + isDatasetStable = isResourceStale(creationTime); + } catch (e) { + console.log(`dataset(${dataset.id}).getMetadata() failed`); + console.log(e); + return; + } + if (isDatasetStable) { + try { + await dataset.delete({force: true}); + } catch (e) { + console.log(`dataset(${dataset.id}).delete() failed`); + console.log(e); + } + } + } +} From aaf8776cd84418b86f5e9ddf00d43689ba529089 Mon Sep 17 00:00:00 2001 From: "Leah E. Cole" <6719667+leahecole@users.noreply.github.com> Date: Wed, 20 Aug 2025 11:31:08 -0400 Subject: [PATCH 317/333] Revert "chore: revert previous node-version template updates (#593)" (#597) This reverts commit c664080e2fedc1eee239c40cf0bd56c18a14d28b. --- handwritten/bigquery-storage/.github/.OwlBot.lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml index be00544b5d7..3037bc547d9 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:1d7c29870723d4d2e32870c6dcdc43d4aa93dcc6519272d025bfcf2ecd48f091 -# created: 2025-08-14T17:16:30.591542591Z + digest: sha256:b612d739b0533e56ba174526ca339f264b63e911c30d6f83f55b57c38cc6ad2a +# created: 2025-08-15T12:36:48.871481111Z From 0a3e21d2d5366365e1f875e7c3f619a022294be6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 20 Aug 2025 19:04:48 +0200 Subject: [PATCH 318/333] chore(deps): update dependency sinon to v21 (#574) --- handwritten/bigquery-storage/.github/scripts/package.json | 2 +- handwritten/bigquery-storage/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/scripts/package.json b/handwritten/bigquery-storage/.github/scripts/package.json index 2c2e5207df9..26ab7802a9a 100644 --- a/handwritten/bigquery-storage/.github/scripts/package.json +++ b/handwritten/bigquery-storage/.github/scripts/package.json @@ -16,6 +16,6 @@ "devDependencies": { "@octokit/rest": "^19.0.0", "mocha": "^10.0.0", - "sinon": "^18.0.0" + "sinon": "^21.0.0" } } \ No newline at end of file diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c54ac91588d..419219b4185 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -57,7 +57,7 @@ "null-loader": "^4.0.1", "pack-n-play": "^3.0.1", "path-to-regexp": "^8.2.0", - "sinon": "^20.0.0", + "sinon": "^21.0.0", "ts-loader": "^9.5.2", "typescript": "^5.8.2", "uuid": "^11.1.0", From d6b883ee449c8182bd750f73e6e5ad232800fb79 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 20 Aug 2025 19:50:15 +0200 Subject: [PATCH 319/333] chore(deps): update dependency @octokit/rest to v21 (#548) --- handwritten/bigquery-storage/.github/scripts/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/.github/scripts/package.json 
b/handwritten/bigquery-storage/.github/scripts/package.json index 26ab7802a9a..84b201023bf 100644 --- a/handwritten/bigquery-storage/.github/scripts/package.json +++ b/handwritten/bigquery-storage/.github/scripts/package.json @@ -14,7 +14,7 @@ "js-yaml": "^4.1.0" }, "devDependencies": { - "@octokit/rest": "^19.0.0", + "@octokit/rest": "^21.0.0", "mocha": "^10.0.0", "sinon": "^21.0.0" } From 788e94753d54011054af0c4c86870f074aca09ac Mon Sep 17 00:00:00 2001 From: "Leah E. Cole" <6719667+leahecole@users.noreply.github.com> Date: Fri, 26 Sep 2025 16:33:31 -0400 Subject: [PATCH 320/333] fix: update gax version, adjust monkeypatch for protobufjs (#607) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: update gax version, adjust monkeypatch for protobufjs * lint * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/package.json | 5 +++-- handwritten/bigquery-storage/src/protobuf/index.ts | 10 ++++++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 419219b4185..d5b27d5e1bd 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -32,8 +32,9 @@ "apache-arrow": "^19.0.1", "core-js": "^3.41.0", "extend": "^3.0.2", - "google-auth-library": "^10.0.0-rc.1", - "google-gax": "^5.0.1-rc.0" + "google-auth-library": "^10.0.0", + "google-gax": "^5.0.0" + }, "peerDependencies": { "protobufjs": "^7.2.4 - 7.5.0" diff --git a/handwritten/bigquery-storage/src/protobuf/index.ts b/handwritten/bigquery-storage/src/protobuf/index.ts index 1091c343834..d8488eda36b 100644 --- a/handwritten/bigquery-storage/src/protobuf/index.ts +++ b/handwritten/bigquery-storage/src/protobuf/index.ts @@ -21,7 +21,13 @@ type IDescriptorProto = 
protos.google.protobuf.IDescriptorProto; declare module 'protobufjs' { // eslint-disable-next-line @typescript-eslint/no-namespace namespace Type { - let toDescriptor: (protoVersion: string) => IDescriptorProto; - let fromDescriptor: (descriptor: IDescriptorProto) => Type; + let fromDescriptor: ( + descriptor: Message | IDescriptorProto, + ) => Type; + } + interface Type { + toDescriptor( + protoVersion: string, + ): Message & IDescriptorProto; } } From c6495e116787ac92ad86b5e0de70a6f50cdf6908 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 26 Sep 2025 16:48:32 -0400 Subject: [PATCH 321/333] chore: update generator logic for nodejs_gapic_combined_pkg rule, and update templates (#576) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add methods from gax to cache proto root and process custom error details fix: distinguish plural and singular path templates PiperOrigin-RevId: 772168312 Source-Link: https://github.com/googleapis/googleapis/commit/873d84ec93f0f7606f5e5c8f11d06f1ebb198a6b Source-Link: https://github.com/googleapis/googleapis-gen/commit/f448c1b4eaaa5fdc7021a682068c313d6f2f104a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjQ0OGMxYjRlYWFhNWZkYzcwMjFhNjgyMDY4YzMxM2Q2ZjJmMTA0YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: add top-level BUILD rule for new bundled generation process PiperOrigin-RevId: 802243639 Source-Link: https://github.com/googleapis/googleapis/commit/4a2d23d124c5d9f0dec05c110659fd0a50462969 Source-Link: https://github.com/googleapis/googleapis-gen/commit/03d5b980fd912f8e281f817744a9cef35aaf3330 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDNkNWI5ODBmZDkxMmY4ZTI4MWY4MTc3NDRhOWNlZjM1YWFmMzMzMCJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove .md extension on LICENSE files PiperOrigin-RevId: 802685533 Source-Link: https://github.com/googleapis/googleapis/commit/0d129bf6a674a5b7b1fe0f29df633547236dbb55 Source-Link: https://github.com/googleapis/googleapis-gen/commit/73664e200253ddb8de2b9bb9c65f6b68301102fb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzM2NjRlMjAwMjUzZGRiOGRlMmI5YmI5YzY1ZjZiNjgzMDExMDJmYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: remove unneeded BAZEL import PiperOrigin-RevId: 802775018 Source-Link: https://github.com/googleapis/googleapis/commit/af3f9267503e6eb56660c0557d79ecdb0c019065 Source-Link: https://github.com/googleapis/googleapis-gen/commit/48e463440e963c15513ed1c76674975546f6dba6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDhlNDYzNDQwZTk2M2MxNTUxM2VkMWM3NjY3NDk3NTU0NmY2ZGJhNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: improve README and fix combined gapic rule generation chore: request.id should be typecast to a string PiperOrigin-RevId: 803577907 Source-Link: https://github.com/googleapis/googleapis/commit/8f0f8860e57013b851174dba371c19af9c368b41 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6c5556d05b9e3e4bbe1a78b36daa01f9a398dec1 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmM1NTU2ZDA1YjllM2U0YmJlMWE3OGIzNmRhYTAxZjlhMzk4ZGVjMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update generator logic for nodejs_gapic_combined_pkg rule, and update templates PiperOrigin-RevId: 808677883 Source-Link: https://github.com/googleapis/googleapis/commit/a32846d78b106d72f51e5e2186bc3be9dbf45c7d Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/593da2652f2c43b769dfeb19161a3be955f97674 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTkzZGEyNjUyZjJjNDNiNzY5ZGZlYjE5MTYxYTNiZTk1NWY5NzY3NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/.OwlBot.yaml | 19 ++ handwritten/bigquery-storage/LICENSE.md | 202 ++++++++++++++++++ .../src/v1/big_query_read_client.ts | 38 +++- .../src/v1/big_query_write_client.ts | 92 +++++++- .../metastore_partition_service_client.ts | 74 ++++++- .../metastore_partition_service_client.ts | 74 ++++++- .../src/v1beta1/big_query_storage_client.ts | 80 ++++++- 7 files changed, 553 insertions(+), 26 deletions(-) create mode 100644 handwritten/bigquery-storage/.OwlBot.yaml create mode 100644 handwritten/bigquery-storage/LICENSE.md diff --git a/handwritten/bigquery-storage/.OwlBot.yaml b/handwritten/bigquery-storage/.OwlBot.yaml new file mode 100644 index 00000000000..7dc2a309c91 --- /dev/null +++ b/handwritten/bigquery-storage/.OwlBot.yaml @@ -0,0 +1,19 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +deep-copy-regex: + - source: /google/cloud/bigquery/storage/google-cloud-bigquery-storage-nodejs + dest: /owl-bot-staging/google-cloud-bigquery-storage + +api-name: storage \ No newline at end of file diff --git a/handwritten/bigquery-storage/LICENSE.md b/handwritten/bigquery-storage/LICENSE.md new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/handwritten/bigquery-storage/LICENSE.md @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts index c835e27f186..b34ec292cc9 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_read_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_read_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -574,7 +574,23 @@ export class BigQueryReadClient { this._log.info('createReadSession response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Splits a given `ReadStream` into two `ReadStream` objects. 
These @@ -720,7 +736,23 @@ export class BigQueryReadClient { this._log.info('splitReadStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts index debb8fbac27..a8db6ca408b 100644 --- a/handwritten/bigquery-storage/src/v1/big_query_write_client.ts +++ b/handwritten/bigquery-storage/src/v1/big_query_write_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -548,7 +548,23 @@ export class BigQueryWriteClient { this._log.info('createWriteStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets information about a write stream. 
@@ -679,7 +695,23 @@ export class BigQueryWriteClient { this._log.info('getWriteStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Finalize a write stream so that no new data can be appended to the @@ -808,7 +840,23 @@ export class BigQueryWriteClient { this._log.info('finalizeWriteStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Atomically commits a group of `PENDING` streams that belong to the same @@ -943,7 +991,23 @@ export class BigQueryWriteClient { this._log.info('batchCommitWriteStreams response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Flushes rows to a BUFFERED stream. 
@@ -1071,7 +1135,23 @@ export class BigQueryWriteClient { this._log.info('flushRows response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts index 67ba5f9d1eb..ff3134f4649 100644 --- a/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts +++ b/handwritten/bigquery-storage/src/v1alpha/metastore_partition_service_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -553,7 +553,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes metastore partitions from a table. 
@@ -697,7 +713,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Updates metastore partitions in a table. @@ -840,7 +872,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets metastore partitions from a table. 
@@ -986,7 +1034,23 @@ export class MetastorePartitionServiceClient { this._log.info('listMetastorePartitions response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts index 93b471cf123..2f0a4eb9d29 100644 --- a/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts +++ b/handwritten/bigquery-storage/src/v1beta/metastore_partition_service_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -553,7 +553,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes metastore partitions from a table. 
@@ -697,7 +713,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Updates metastore partitions in a table. @@ -840,7 +872,23 @@ export class MetastorePartitionServiceClient { ); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets metastore partitions from a table. 
@@ -988,7 +1036,23 @@ export class MetastorePartitionServiceClient { this._log.info('listMetastorePartitions response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts index 9ac7f79367b..782d60859b0 100644 --- a/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts +++ b/handwritten/bigquery-storage/src/v1beta1/big_query_storage_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -534,8 +534,10 @@ export class BigQueryStorageClient { options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = this._gaxModule.routingHeader.fromParams({ - 'table_reference.project_id': request.tableReference!.projectId ?? '', - 'table_reference.dataset_id': request.tableReference!.datasetId ?? '', + 'table_reference.project_id': + request.tableReference!.projectId?.toString() ?? '', + 'table_reference.dataset_id': + request.tableReference!.datasetId?.toString() ?? 
'', }); this.initialize().catch(err => { throw err; @@ -569,7 +571,23 @@ export class BigQueryStorageClient { this._log.info('createReadSession response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Creates additional streams for a ReadSession. This API can be used to @@ -703,7 +721,23 @@ export class BigQueryStorageClient { this._log.info('batchCreateReadSessionStreams response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Causes a single stream in a ReadSession to gracefully stop. This @@ -843,7 +877,23 @@ export class BigQueryStorageClient { this._log.info('finalizeStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Splits a given read stream into two Streams. 
These streams are referred to @@ -990,7 +1040,23 @@ export class BigQueryStorageClient { this._log.info('splitReadStream response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** From bb93b5b17f6c9ebc18920e97462050b5676e190c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 27 Sep 2025 01:40:26 +0100 Subject: [PATCH 322/333] chore(deps): update dependency jsdoc-fresh to v4 (#570) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d5b27d5e1bd..d4bfbf7c0dc 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -50,7 +50,7 @@ "gapic-tools": "^1.0.1", "gts": "^6.0.2", "jsdoc": "^4.0.4", - "jsdoc-fresh": "^3.0.0", + "jsdoc-fresh": "^4.0.0", "jsdoc-region-tag": "^3.0.0", "linkinator": "^6.1.2", "mocha": "^11.1.0", From 0177fd335bc74a7af308348425dca417586dc906 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 27 Sep 2025 01:48:08 +0100 Subject: [PATCH 323/333] fix(deps): update dependency apache-arrow to v21 (#579) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index d4bfbf7c0dc..c2c5bb54cb5 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -29,7 +29,7 @@ "dependencies": { "@google-cloud/paginator": "^6.0.0", "@google-cloud/precise-date": "^5.0.0", - "apache-arrow": "^19.0.1", + "apache-arrow": "^21.0.0", "core-js": "^3.41.0", "extend": "^3.0.2", "google-auth-library": "^10.0.0", From 6220309cac03db7ec13198b7b4e464e075165601 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 29 Sep 2025 13:56:57 +0100 Subject: [PATCH 324/333] chore(deps): update dependency pack-n-play to v4 (#606) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index c2c5bb54cb5..8838d09c56f 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -56,7 +56,7 @@ "mocha": "^11.1.0", "nise": "^6.1.1", "null-loader": "^4.0.1", - "pack-n-play": "^3.0.1", + "pack-n-play": "^4.0.0", "path-to-regexp": "^8.2.0", "sinon": "^21.0.0", "ts-loader": "^9.5.2", From 991b335d1b552af52695b52b5f7afbe0bd303d17 Mon Sep 17 00:00:00 2001 From: danieljbruce Date: Wed, 18 Feb 2026 13:42:18 -0500 Subject: [PATCH 325/333] test: Skip test where large request returns an error (#647) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Set the test to skipped * Add a TODO for the skipped test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Change test documentation * undo yaml changes * 
Revert "undo yaml changes" This reverts commit b1b524603a352b6d725e082a935bb6707480f7f9. --------- Co-authored-by: Owl Bot --- handwritten/bigquery-storage/protos/protos.d.ts | 2 +- handwritten/bigquery-storage/protos/protos.js | 2 +- handwritten/bigquery-storage/protos/protos.json | 9 ++++++++- .../system-test/managed_writer_client_test.ts | 5 ++++- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index 0f9ad177a15..d4ed87d16d3 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -1,4 +1,4 @@ -// Copyright 2025 Google LLC +// Copyright 2026 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index e5b18720ae2..518810e0e07 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -1,4 +1,4 @@ -// Copyright 2025 Google LLC +// Copyright 2026 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 0307c1da4e2..450b5bef965 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -3150,7 +3150,14 @@ "type": "ServiceOptions", "id": 3 } - } + }, + "reserved": [ + [ + 4, + 4 + ], + "stream" + ] }, "MethodDescriptorProto": { "edition": "proto2", diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index b0ad7682daf..fbe7d1d75f1 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -1617,7 +1617,10 @@ describe('managedwriter.WriterClient', () => { } }); - it('send large request should return an error', async () => { + it.skip('send large request should return an error', async () => { + // Service limits changes are in flux, so this we disabled this test as is prone to flakes + // + // TODO: This will be tracked in bug https://b.corp.google.com/issues/485577546 bqWriteClient.initialize().catch(err => { throw err; }); From 52476cc1b354800eca83f00ccbddbad2d972e0ad Mon Sep 17 00:00:00 2001 From: danieljbruce Date: Thu, 19 Feb 2026 15:55:45 -0500 Subject: [PATCH 326/333] fix: Delete the whole owlbot yaml file to trigger next googlegen-apis release (#648) * Delete the whole owlbot yaml file * fix: Delete the whole owlbot yaml file to trigger next googlegen-apis release * Add to templates_excludes --- handwritten/bigquery-storage/.OwlBot.yaml | 19 ------------------- handwritten/bigquery-storage/owlbot.py | 2 +- 2 files changed, 1 insertion(+), 20 deletions(-) delete mode 100644 handwritten/bigquery-storage/.OwlBot.yaml diff --git a/handwritten/bigquery-storage/.OwlBot.yaml b/handwritten/bigquery-storage/.OwlBot.yaml deleted file mode 100644 index 7dc2a309c91..00000000000 --- 
a/handwritten/bigquery-storage/.OwlBot.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -deep-copy-regex: - - source: /google/cloud/bigquery/storage/google-cloud-bigquery-storage-nodejs - dest: /owl-bot-staging/google-cloud-bigquery-storage - -api-name: storage \ No newline at end of file diff --git a/handwritten/bigquery-storage/owlbot.py b/handwritten/bigquery-storage/owlbot.py index 44b15e78f18..918849d1a7d 100644 --- a/handwritten/bigquery-storage/owlbot.py +++ b/handwritten/bigquery-storage/owlbot.py @@ -16,5 +16,5 @@ node.owlbot_main( staging_excludes=['package.json', 'README.md', 'src/index.ts'], - templates_excludes=['src/index.ts'] + templates_excludes=['src/index.ts','.OwlBot.yaml'] ) From 3126225cc65cf769bfab0d111a9be50233824f58 Mon Sep 17 00:00:00 2001 From: Tomo Suzuki Date: Mon, 23 Feb 2026 14:33:15 -0500 Subject: [PATCH 327/333] chore: replace old teams with cloud-sdk-nodejs-team and bigquery-team (#649) b/478003109 --- handwritten/bigquery-storage/.github/CODEOWNERS | 2 +- handwritten/bigquery-storage/.repo-metadata.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS index d982c0a9506..4fabf4416d6 100644 --- a/handwritten/bigquery-storage/.github/CODEOWNERS +++ b/handwritten/bigquery-storage/.github/CODEOWNERS @@ -6,4 +6,4 @@ # Unless specified, the jsteam is the default 
owner for nodejs repositories. -* @googleapis/api-bigquery @googleapis/jsteam \ No newline at end of file +* @googleapis/bigquery-team @googleapis/cloud-sdk-nodejs-team @googleapis/jsteam \ No newline at end of file diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index 6d5e38471a7..a85a5e5bc0f 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -11,7 +11,7 @@ "name": "bigquerystorage", "name_pretty": "Google BigQuery Storage", "api_id": "bigquerystorage.googleapis.com", - "codeowner_team": "@googleapis/api-bigquery", + "codeowner_team": "@googleapis/bigquery-team @googleapis/cloud-sdk-nodejs-team", "api_shortname": "bigquerystorage", "library_type": "GAPIC_AUTO" } From 4add7d8dc9f30c89225adf640cf1117d98a263f0 Mon Sep 17 00:00:00 2001 From: danieljbruce Date: Wed, 25 Feb 2026 11:37:02 -0500 Subject: [PATCH 328/333] feat: Add high precision Picosecond timestamp support for write calls (#654) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Port over the new proto changes * Add the deleted protos back * Add the generated tests * Check out the read and write clients from main * regenerate protos * Revert changes to generated test files * Add the new timestamp-output-format file * test: update timestamp output format system test to use direct request Modify the system test in `timestamp_output_format.ts` to use `table.request` instead of `table.getRows`. This ensures that we are checking the raw data returned by the server, bypassing any unreleased changes in the BigQuery library. The test now verifies high-precision timestamps by inspecting the raw JSON response from the `/data` endpoint. 
Co-authored-by: danieljbruce <8935272+danieljbruce@users.noreply.github.com> * test: add system test for picosecond precision timestamps Added a new system test case to `managed_writer_client_test.ts` that verifies the ability to write timestamps with picosecond precision. The test creates a new table with a schema specifying `timestampPrecision: 12` and writes a row with a high-precision timestamp string. This test helps ensure that the BigQuery Storage Write API and the associated adaptation logic correctly handle picosecond precision timestamps. Co-authored-by: danieljbruce <8935272+danieljbruce@users.noreply.github.com> * Isolate the writer test - try to solve interior er * Change timestampPrecision mapping * correct the automated changes * timestampPrecision value changes * Read from row 2 and not row 0 * remove only * Delete file with read tests * Revert the year on index.ts * Added unit tests for source code changes * Unit tests should cover the high precision * Add assertion check for type string * Delete useless tests * remove one of the timestamp precision tests * Add TODO * Update src/adapt/proto.ts Co-authored-by: Alvaro Viebrantz * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: danieljbruce <8935272+danieljbruce@users.noreply.github.com> Co-authored-by: Alvaro Viebrantz Co-authored-by: Owl Bot --- .../cloud/bigquery/storage/v1/arrow.proto | 25 + .../cloud/bigquery/storage/v1/avro.proto | 25 + .../cloud/bigquery/storage/v1/storage.proto | 19 +- .../cloud/bigquery/storage/v1/stream.proto | 6 +- 
.../cloud/bigquery/storage/v1/table.proto | 11 + .../bigquery-storage/protos/protos.d.ts | 243 ++++--- handwritten/bigquery-storage/protos/protos.js | 676 +++++++++++------- .../bigquery-storage/protos/protos.json | 59 +- .../bigquery-storage/src/adapt/proto.ts | 8 +- .../bigquery-storage/src/adapt/schema.ts | 11 + .../system-test/managed_writer_client_test.ts | 130 ++++ .../bigquery-storage/test/adapt/proto.ts | 41 ++ 12 files changed, 870 insertions(+), 384 deletions(-) diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto index f4f17c3cdf5..0132aab1935 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/arrow.proto @@ -58,7 +58,32 @@ message ArrowSerializationOptions { ZSTD = 2; } + // The precision of the timestamp value in the Avro message. This precision + // will **only** be applied to the column(s) with the `TIMESTAMP_PICOS` type. + enum PicosTimestampPrecision { + // Unspecified timestamp precision. The default precision is microseconds. + PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0; + + // Timestamp values returned by Read API will be truncated to microsecond + // level precision. The value will be encoded as Arrow TIMESTAMP type in a + // 64 bit integer. + TIMESTAMP_PRECISION_MICROS = 1; + + // Timestamp values returned by Read API will be truncated to nanosecond + // level precision. The value will be encoded as Arrow TIMESTAMP type in a + // 64 bit integer. + TIMESTAMP_PRECISION_NANOS = 2; + + // Read API will return full precision picosecond value. The value will be + // encoded as a string which conforms to ISO 8601 format. + TIMESTAMP_PRECISION_PICOS = 3; + } + // The compression codec to use for Arrow buffers in serialized record // batches. CompressionCodec buffer_compression = 2; + + // Optional. 
Set timestamp precision option. If not set, the default precision + // is microseconds. + PicosTimestampPrecision picos_timestamp_precision = 3; } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto index ddf7c15ae21..6082fa58d86 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/avro.proto @@ -42,6 +42,27 @@ message AvroRows { // Contains options specific to Avro Serialization. message AvroSerializationOptions { + // The precision of the timestamp value in the Avro message. This precision + // will **only** be applied to the column(s) with the `TIMESTAMP_PICOS` type. + enum PicosTimestampPrecision { + // Unspecified timestamp precision. The default precision is microseconds. + PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0; + + // Timestamp values returned by Read API will be truncated to microsecond + // level precision. The value will be encoded as Avro TIMESTAMP type in a + // 64 bit integer. + TIMESTAMP_PRECISION_MICROS = 1; + + // Timestamp values returned by Read API will be truncated to nanosecond + // level precision. The value will be encoded as Avro TIMESTAMP type in a + // 64 bit integer. + TIMESTAMP_PRECISION_NANOS = 2; + + // Read API will return full precision picosecond value. The value will be + // encoded as a string which conforms to ISO 8601 format. + TIMESTAMP_PRECISION_PICOS = 3; + } + // Enable displayName attribute in Avro schema. // // The Avro specification requires field names to be alphanumeric. By @@ -53,4 +74,8 @@ message AvroSerializationOptions { // value and populates a "displayName" attribute for every avro field with the // original column name. bool enable_display_name_attribute = 1; + + // Optional. Set timestamp precision option. If not set, the default precision + // is microseconds. 
+ PicosTimestampPrecision picos_timestamp_precision = 2; } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto index c9dc3f3d460..dc0ae7f9f4d 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/storage.proto @@ -78,9 +78,9 @@ service BigQueryRead { } // Reads rows from the stream in the format prescribed by the ReadSession. - // Each response contains one or more table rows, up to a maximum of 100 MiB + // Each response contains one or more table rows, up to a maximum of 128 MB // per response; read requests which attempt to read individual rows larger - // than 100 MiB will fail. + // than 128 MB will fail. // // Each request also returns a set of stream statistics reflecting the current // state of the stream. @@ -423,8 +423,6 @@ message CreateWriteStreamRequest { // Requests larger than this return an error, typically `INVALID_ARGUMENT`. message AppendRowsRequest { // Arrow schema and data. - // Arrow format is an experimental feature only selected for allowlisted - // customers. message ArrowData { // Optional. Arrow Schema used to serialize the data. ArrowSchema writer_schema = 1; @@ -436,8 +434,8 @@ message AppendRowsRequest { // ProtoData contains the data rows and schema when constructing append // requests. message ProtoData { - // The protocol buffer schema used to serialize the data. Provide this value - // whenever: + // Optional. The protocol buffer schema used to serialize the data. Provide + // this value whenever: // // * You send the first request of an RPC connection. // @@ -446,7 +444,7 @@ message AppendRowsRequest { // * You specify a new destination table. ProtoSchema writer_schema = 1; - // Serialized row data in protobuf message format. + // Required. Serialized row data in protobuf message format. 
// Currently, the backend expects the serialized rows to adhere to // proto2 semantics when appending rows, particularly with respect to // how default values are encoded. @@ -522,8 +520,7 @@ message AppendRowsRequest { // Rows in proto format. ProtoData proto_rows = 4; - // Rows in arrow format. This is an experimental feature only selected for - // allowlisted customers. + // Rows in arrow format. ArrowData arrow_rows = 5; } @@ -553,8 +550,8 @@ message AppendRowsRequest { // Optional. Default missing value interpretation for all columns in the // table. When a value is specified on an `AppendRowsRequest`, it is applied - // to all requests on the connection from that point forward, until a - // subsequent `AppendRowsRequest` sets it to a different value. + // to all requests from that point forward, until a subsequent + // `AppendRowsRequest` sets it to a different value. // `missing_value_interpretation` can override // `default_missing_value_interpretation`. For example, if you want to write // `NULL` instead of using default values for some columns, you can set diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto index 2e52a0732b1..f0d1dfef5c3 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/stream.proto @@ -328,8 +328,8 @@ message WriteStream { // Immutable. Mode of the stream. WriteMode write_mode = 7 [(google.api.field_behavior) = IMMUTABLE]; - // Immutable. The geographic location where the stream's dataset resides. See - // https://cloud.google.com/bigquery/docs/locations for supported + // Output only. The geographic location where the stream's dataset resides. + // See https://cloud.google.com/bigquery/docs/locations for supported // locations. 
- string location = 8 [(google.api.field_behavior) = IMMUTABLE]; + string location = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto index eb75d706725..30c30228c27 100644 --- a/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto +++ b/handwritten/bigquery-storage/protos/google/cloud/bigquery/storage/v1/table.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1; import "google/api/field_behavior.proto"; +import "google/protobuf/wrappers.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Storage.V1"; option go_package = "cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb"; @@ -178,6 +179,16 @@ message TableFieldSchema { // (https://cloud.google.com/bigquery/docs/default-values) for this field. string default_value_expression = 10 [(google.api.field_behavior) = OPTIONAL]; + // Optional. Precision (maximum number of total digits in base 10) for seconds + // of TIMESTAMP type. + // + // Possible values include: + // + // * 6 (Default, for TIMESTAMP type with microsecond precision) + // * 12 (For TIMESTAMP type with picosecond precision) + google.protobuf.Int64Value timestamp_precision = 27 + [(google.api.field_behavior) = OPTIONAL]; + // Optional. The subtype of the RANGE, if the type of this field is RANGE. If // the type is RANGE, this field is required. 
Possible values for the field // element type of a RANGE include: diff --git a/handwritten/bigquery-storage/protos/protos.d.ts b/handwritten/bigquery-storage/protos/protos.d.ts index d4ed87d16d3..b5666824141 100644 --- a/handwritten/bigquery-storage/protos/protos.d.ts +++ b/handwritten/bigquery-storage/protos/protos.d.ts @@ -234,6 +234,9 @@ export namespace google { /** ArrowSerializationOptions bufferCompression */ bufferCompression?: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null); + + /** ArrowSerializationOptions picosTimestampPrecision */ + picosTimestampPrecision?: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision|null); } /** Represents an ArrowSerializationOptions. */ @@ -248,6 +251,9 @@ export namespace google { /** ArrowSerializationOptions bufferCompression. */ public bufferCompression: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec); + /** ArrowSerializationOptions picosTimestampPrecision. */ + public picosTimestampPrecision: (google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision|keyof typeof google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision); + /** * Creates a new ArrowSerializationOptions instance using the specified properties. * @param [properties] Properties to set @@ -334,6 +340,14 @@ export namespace google { LZ4_FRAME = 1, ZSTD = 2 } + + /** PicosTimestampPrecision enum. */ + enum PicosTimestampPrecision { + PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0, + TIMESTAMP_PRECISION_MICROS = 1, + TIMESTAMP_PRECISION_NANOS = 2, + TIMESTAMP_PRECISION_PICOS = 3 + } } /** Properties of an AvroSchema. 
*/ @@ -541,6 +555,9 @@ export namespace google { /** AvroSerializationOptions enableDisplayNameAttribute */ enableDisplayNameAttribute?: (boolean|null); + + /** AvroSerializationOptions picosTimestampPrecision */ + picosTimestampPrecision?: (google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision|keyof typeof google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision|null); } /** Represents an AvroSerializationOptions. */ @@ -555,6 +572,9 @@ export namespace google { /** AvroSerializationOptions enableDisplayNameAttribute. */ public enableDisplayNameAttribute: boolean; + /** AvroSerializationOptions picosTimestampPrecision. */ + public picosTimestampPrecision: (google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision|keyof typeof google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision); + /** * Creates a new AvroSerializationOptions instance using the specified properties. * @param [properties] Properties to set @@ -633,6 +653,17 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace AvroSerializationOptions { + + /** PicosTimestampPrecision enum. */ + enum PicosTimestampPrecision { + PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0, + TIMESTAMP_PRECISION_MICROS = 1, + TIMESTAMP_PRECISION_NANOS = 2, + TIMESTAMP_PRECISION_PICOS = 3 + } + } + /** Properties of a ProtoSchema. */ interface IProtoSchema { @@ -4363,6 +4394,9 @@ export namespace google { /** TableFieldSchema defaultValueExpression */ defaultValueExpression?: (string|null); + /** TableFieldSchema timestampPrecision */ + timestampPrecision?: (google.protobuf.IInt64Value|null); + /** TableFieldSchema rangeElementType */ rangeElementType?: (google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null); } @@ -4403,6 +4437,9 @@ export namespace google { /** TableFieldSchema defaultValueExpression. 
*/ public defaultValueExpression: string; + /** TableFieldSchema timestampPrecision. */ + public timestampPrecision?: (google.protobuf.IInt64Value|null); + /** TableFieldSchema rangeElementType. */ public rangeElementType?: (google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null); @@ -15961,109 +15998,6 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } - /** Properties of a Timestamp. */ - interface ITimestamp { - - /** Timestamp seconds */ - seconds?: (number|Long|string|null); - - /** Timestamp nanos */ - nanos?: (number|null); - } - - /** Represents a Timestamp. */ - class Timestamp implements ITimestamp { - - /** - * Constructs a new Timestamp. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.ITimestamp); - - /** Timestamp seconds. */ - public seconds: (number|Long|string); - - /** Timestamp nanos. */ - public nanos: number; - - /** - * Creates a new Timestamp instance using the specified properties. - * @param [properties] Properties to set - * @returns Timestamp instance - */ - public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; - - /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @param message Timestamp message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. 
- * @param message Timestamp message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Timestamp message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; - - /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; - - /** - * Verifies a Timestamp message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns Timestamp - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; - - /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. - * @param message Timestamp - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Timestamp to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for Timestamp - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - /** Properties of a DoubleValue. */ interface IDoubleValue { @@ -16937,6 +16871,109 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** Properties of a Timestamp. */ + interface ITimestamp { + + /** Timestamp seconds */ + seconds?: (number|Long|string|null); + + /** Timestamp nanos */ + nanos?: (number|null); + } + + /** Represents a Timestamp. */ + class Timestamp implements ITimestamp { + + /** + * Constructs a new Timestamp. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.ITimestamp); + + /** Timestamp seconds. */ + public seconds: (number|Long|string); + + /** Timestamp nanos. */ + public nanos: number; + + /** + * Creates a new Timestamp instance using the specified properties. + * @param [properties] Properties to set + * @returns Timestamp instance + */ + public static create(properties?: google.protobuf.ITimestamp): google.protobuf.Timestamp; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. 
+ * @param message Timestamp message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.ITimestamp, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.Timestamp; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.Timestamp; + + /** + * Verifies a Timestamp message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Timestamp + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.Timestamp; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @param message Timestamp + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.Timestamp, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Timestamp to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Timestamp + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** Properties of an Any. */ interface IAny { diff --git a/handwritten/bigquery-storage/protos/protos.js b/handwritten/bigquery-storage/protos/protos.js index 518810e0e07..e86f3b79a4b 100644 --- a/handwritten/bigquery-storage/protos/protos.js +++ b/handwritten/bigquery-storage/protos/protos.js @@ -548,6 +548,7 @@ * @memberof google.cloud.bigquery.storage.v1 * @interface IArrowSerializationOptions * @property {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec|null} [bufferCompression] ArrowSerializationOptions bufferCompression + * @property {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision|null} [picosTimestampPrecision] ArrowSerializationOptions picosTimestampPrecision */ /** @@ -573,6 +574,14 @@ */ ArrowSerializationOptions.prototype.bufferCompression = 0; + /** + * ArrowSerializationOptions picosTimestampPrecision. + * @member {google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision} picosTimestampPrecision + * @memberof google.cloud.bigquery.storage.v1.ArrowSerializationOptions + * @instance + */ + ArrowSerializationOptions.prototype.picosTimestampPrecision = 0; + /** * Creates a new ArrowSerializationOptions instance using the specified properties. 
* @function create @@ -599,6 +608,8 @@ writer = $Writer.create(); if (message.bufferCompression != null && Object.hasOwnProperty.call(message, "bufferCompression")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.bufferCompression); + if (message.picosTimestampPrecision != null && Object.hasOwnProperty.call(message, "picosTimestampPrecision")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.picosTimestampPrecision); return writer; }; @@ -639,6 +650,10 @@ message.bufferCompression = reader.int32(); break; } + case 3: { + message.picosTimestampPrecision = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -683,6 +698,16 @@ case 2: break; } + if (message.picosTimestampPrecision != null && message.hasOwnProperty("picosTimestampPrecision")) + switch (message.picosTimestampPrecision) { + default: + return "picosTimestampPrecision: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } return null; }; @@ -718,6 +743,30 @@ message.bufferCompression = 2; break; } + switch (object.picosTimestampPrecision) { + default: + if (typeof object.picosTimestampPrecision === "number") { + message.picosTimestampPrecision = object.picosTimestampPrecision; + break; + } + break; + case "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED": + case 0: + message.picosTimestampPrecision = 0; + break; + case "TIMESTAMP_PRECISION_MICROS": + case 1: + message.picosTimestampPrecision = 1; + break; + case "TIMESTAMP_PRECISION_NANOS": + case 2: + message.picosTimestampPrecision = 2; + break; + case "TIMESTAMP_PRECISION_PICOS": + case 3: + message.picosTimestampPrecision = 3; + break; + } return message; }; @@ -734,10 +783,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.bufferCompression = options.enums === String ? "COMPRESSION_UNSPECIFIED" : 0; + object.picosTimestampPrecision = options.enums === String ? 
"PICOS_TIMESTAMP_PRECISION_UNSPECIFIED" : 0; + } if (message.bufferCompression != null && message.hasOwnProperty("bufferCompression")) object.bufferCompression = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] === undefined ? message.bufferCompression : $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec[message.bufferCompression] : message.bufferCompression; + if (message.picosTimestampPrecision != null && message.hasOwnProperty("picosTimestampPrecision")) + object.picosTimestampPrecision = options.enums === String ? $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision[message.picosTimestampPrecision] === undefined ? message.picosTimestampPrecision : $root.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision[message.picosTimestampPrecision] : message.picosTimestampPrecision; return object; }; @@ -783,6 +836,24 @@ return values; })(); + /** + * PicosTimestampPrecision enum. 
+ * @name google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision + * @enum {number} + * @property {number} PICOS_TIMESTAMP_PRECISION_UNSPECIFIED=0 PICOS_TIMESTAMP_PRECISION_UNSPECIFIED value + * @property {number} TIMESTAMP_PRECISION_MICROS=1 TIMESTAMP_PRECISION_MICROS value + * @property {number} TIMESTAMP_PRECISION_NANOS=2 TIMESTAMP_PRECISION_NANOS value + * @property {number} TIMESTAMP_PRECISION_PICOS=3 TIMESTAMP_PRECISION_PICOS value + */ + ArrowSerializationOptions.PicosTimestampPrecision = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED"] = 0; + values[valuesById[1] = "TIMESTAMP_PRECISION_MICROS"] = 1; + values[valuesById[2] = "TIMESTAMP_PRECISION_NANOS"] = 2; + values[valuesById[3] = "TIMESTAMP_PRECISION_PICOS"] = 3; + return values; + })(); + return ArrowSerializationOptions; })(); @@ -1250,6 +1321,7 @@ * @memberof google.cloud.bigquery.storage.v1 * @interface IAvroSerializationOptions * @property {boolean|null} [enableDisplayNameAttribute] AvroSerializationOptions enableDisplayNameAttribute + * @property {google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision|null} [picosTimestampPrecision] AvroSerializationOptions picosTimestampPrecision */ /** @@ -1275,6 +1347,14 @@ */ AvroSerializationOptions.prototype.enableDisplayNameAttribute = false; + /** + * AvroSerializationOptions picosTimestampPrecision. + * @member {google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision} picosTimestampPrecision + * @memberof google.cloud.bigquery.storage.v1.AvroSerializationOptions + * @instance + */ + AvroSerializationOptions.prototype.picosTimestampPrecision = 0; + /** * Creates a new AvroSerializationOptions instance using the specified properties. 
* @function create @@ -1301,6 +1381,8 @@ writer = $Writer.create(); if (message.enableDisplayNameAttribute != null && Object.hasOwnProperty.call(message, "enableDisplayNameAttribute")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.enableDisplayNameAttribute); + if (message.picosTimestampPrecision != null && Object.hasOwnProperty.call(message, "picosTimestampPrecision")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.picosTimestampPrecision); return writer; }; @@ -1341,6 +1423,10 @@ message.enableDisplayNameAttribute = reader.bool(); break; } + case 2: { + message.picosTimestampPrecision = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -1379,6 +1465,16 @@ if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) if (typeof message.enableDisplayNameAttribute !== "boolean") return "enableDisplayNameAttribute: boolean expected"; + if (message.picosTimestampPrecision != null && message.hasOwnProperty("picosTimestampPrecision")) + switch (message.picosTimestampPrecision) { + default: + return "picosTimestampPrecision: enum value expected"; + case 0: + case 1: + case 2: + case 3: + break; + } return null; }; @@ -1396,6 +1492,30 @@ var message = new $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions(); if (object.enableDisplayNameAttribute != null) message.enableDisplayNameAttribute = Boolean(object.enableDisplayNameAttribute); + switch (object.picosTimestampPrecision) { + default: + if (typeof object.picosTimestampPrecision === "number") { + message.picosTimestampPrecision = object.picosTimestampPrecision; + break; + } + break; + case "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED": + case 0: + message.picosTimestampPrecision = 0; + break; + case "TIMESTAMP_PRECISION_MICROS": + case 1: + message.picosTimestampPrecision = 1; + break; + case "TIMESTAMP_PRECISION_NANOS": + case 2: + message.picosTimestampPrecision = 2; + break; + case "TIMESTAMP_PRECISION_PICOS": + case 3: + 
message.picosTimestampPrecision = 3; + break; + } return message; }; @@ -1412,10 +1532,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.enableDisplayNameAttribute = false; + object.picosTimestampPrecision = options.enums === String ? "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED" : 0; + } if (message.enableDisplayNameAttribute != null && message.hasOwnProperty("enableDisplayNameAttribute")) object.enableDisplayNameAttribute = message.enableDisplayNameAttribute; + if (message.picosTimestampPrecision != null && message.hasOwnProperty("picosTimestampPrecision")) + object.picosTimestampPrecision = options.enums === String ? $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision[message.picosTimestampPrecision] === undefined ? message.picosTimestampPrecision : $root.google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision[message.picosTimestampPrecision] : message.picosTimestampPrecision; return object; }; @@ -1445,6 +1569,24 @@ return typeUrlPrefix + "/google.cloud.bigquery.storage.v1.AvroSerializationOptions"; }; + /** + * PicosTimestampPrecision enum. 
+ * @name google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision + * @enum {number} + * @property {number} PICOS_TIMESTAMP_PRECISION_UNSPECIFIED=0 PICOS_TIMESTAMP_PRECISION_UNSPECIFIED value + * @property {number} TIMESTAMP_PRECISION_MICROS=1 TIMESTAMP_PRECISION_MICROS value + * @property {number} TIMESTAMP_PRECISION_NANOS=2 TIMESTAMP_PRECISION_NANOS value + * @property {number} TIMESTAMP_PRECISION_PICOS=3 TIMESTAMP_PRECISION_PICOS value + */ + AvroSerializationOptions.PicosTimestampPrecision = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED"] = 0; + values[valuesById[1] = "TIMESTAMP_PRECISION_MICROS"] = 1; + values[valuesById[2] = "TIMESTAMP_PRECISION_NANOS"] = 2; + values[valuesById[3] = "TIMESTAMP_PRECISION_PICOS"] = 3; + return values; + })(); + return AvroSerializationOptions; })(); @@ -10617,6 +10759,7 @@ * @property {number|Long|null} [precision] TableFieldSchema precision * @property {number|Long|null} [scale] TableFieldSchema scale * @property {string|null} [defaultValueExpression] TableFieldSchema defaultValueExpression + * @property {google.protobuf.IInt64Value|null} [timestampPrecision] TableFieldSchema timestampPrecision * @property {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null} [rangeElementType] TableFieldSchema rangeElementType */ @@ -10708,6 +10851,14 @@ */ TableFieldSchema.prototype.defaultValueExpression = ""; + /** + * TableFieldSchema timestampPrecision. + * @member {google.protobuf.IInt64Value|null|undefined} timestampPrecision + * @memberof google.cloud.bigquery.storage.v1.TableFieldSchema + * @instance + */ + TableFieldSchema.prototype.timestampPrecision = null; + /** * TableFieldSchema rangeElementType. 
* @member {google.cloud.bigquery.storage.v1.TableFieldSchema.IFieldElementType|null|undefined} rangeElementType @@ -10761,6 +10912,8 @@ writer.uint32(/* id 10, wireType 2 =*/82).string(message.defaultValueExpression); if (message.rangeElementType != null && Object.hasOwnProperty.call(message, "rangeElementType")) $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.encode(message.rangeElementType, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); + if (message.timestampPrecision != null && Object.hasOwnProperty.call(message, "timestampPrecision")) + $root.google.protobuf.Int64Value.encode(message.timestampPrecision, writer.uint32(/* id 27, wireType 2 =*/218).fork()).ldelim(); return writer; }; @@ -10835,6 +10988,10 @@ message.defaultValueExpression = reader.string(); break; } + case 27: { + message.timestampPrecision = $root.google.protobuf.Int64Value.decode(reader, reader.uint32()); + break; + } case 11: { message.rangeElementType = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.decode(reader, reader.uint32()); break; @@ -10934,6 +11091,11 @@ if (message.defaultValueExpression != null && message.hasOwnProperty("defaultValueExpression")) if (!$util.isString(message.defaultValueExpression)) return "defaultValueExpression: string expected"; + if (message.timestampPrecision != null && message.hasOwnProperty("timestampPrecision")) { + var error = $root.google.protobuf.Int64Value.verify(message.timestampPrecision); + if (error) + return "timestampPrecision." 
+ error; + } if (message.rangeElementType != null && message.hasOwnProperty("rangeElementType")) { var error = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.verify(message.rangeElementType); if (error) @@ -11097,6 +11259,11 @@ message.scale = new $util.LongBits(object.scale.low >>> 0, object.scale.high >>> 0).toNumber(); if (object.defaultValueExpression != null) message.defaultValueExpression = String(object.defaultValueExpression); + if (object.timestampPrecision != null) { + if (typeof object.timestampPrecision !== "object") + throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.timestampPrecision: object expected"); + message.timestampPrecision = $root.google.protobuf.Int64Value.fromObject(object.timestampPrecision); + } if (object.rangeElementType != null) { if (typeof object.rangeElementType !== "object") throw TypeError(".google.cloud.bigquery.storage.v1.TableFieldSchema.rangeElementType: object expected"); @@ -11142,6 +11309,7 @@ object.scale = options.longs === String ? "0" : 0; object.defaultValueExpression = ""; object.rangeElementType = null; + object.timestampPrecision = null; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -11175,6 +11343,8 @@ object.defaultValueExpression = message.defaultValueExpression; if (message.rangeElementType != null && message.hasOwnProperty("rangeElementType")) object.rangeElementType = $root.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementType.toObject(message.rangeElementType, options); + if (message.timestampPrecision != null && message.hasOwnProperty("timestampPrecision")) + object.timestampPrecision = $root.google.protobuf.Int64Value.toObject(message.timestampPrecision, options); return object; }; @@ -41336,275 +41506,32 @@ }; /** - * Converts this Duration to JSON. 
- * @function toJSON - * @memberof google.protobuf.Duration - * @instance - * @returns {Object.} JSON object - */ - Duration.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for Duration - * @function getTypeUrl - * @memberof google.protobuf.Duration - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - Duration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.Duration"; - }; - - return Duration; - })(); - - protobuf.Timestamp = (function() { - - /** - * Properties of a Timestamp. - * @memberof google.protobuf - * @interface ITimestamp - * @property {number|Long|null} [seconds] Timestamp seconds - * @property {number|null} [nanos] Timestamp nanos - */ - - /** - * Constructs a new Timestamp. - * @memberof google.protobuf - * @classdesc Represents a Timestamp. - * @implements ITimestamp - * @constructor - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - */ - function Timestamp(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Timestamp seconds. - * @member {number|Long} seconds - * @memberof google.protobuf.Timestamp - * @instance - */ - Timestamp.prototype.seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Timestamp nanos. - * @member {number} nanos - * @memberof google.protobuf.Timestamp - * @instance - */ - Timestamp.prototype.nanos = 0; - - /** - * Creates a new Timestamp instance using the specified properties. 
- * @function create - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp=} [properties] Properties to set - * @returns {google.protobuf.Timestamp} Timestamp instance - */ - Timestamp.create = function create(properties) { - return new Timestamp(properties); - }; - - /** - * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @function encode - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Timestamp.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) - writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); - return writer; - }; - - /** - * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Timestamp.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a Timestamp message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.Timestamp - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.Timestamp} Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Timestamp.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - case 1: { - message.seconds = reader.int64(); - break; - } - case 2: { - message.nanos = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a Timestamp message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.Timestamp - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.Timestamp} Timestamp - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Timestamp.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a Timestamp message. 
- * @function verify - * @memberof google.protobuf.Timestamp - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Timestamp.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) - return "seconds: integer|Long expected"; - if (message.nanos != null && message.hasOwnProperty("nanos")) - if (!$util.isInteger(message.nanos)) - return "nanos: integer expected"; - return null; - }; - - /** - * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.Timestamp - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.Timestamp} Timestamp - */ - Timestamp.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.Timestamp) - return object; - var message = new $root.google.protobuf.Timestamp(); - if (object.seconds != null) - if ($util.Long) - (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; - else if (typeof object.seconds === "string") - message.seconds = parseInt(object.seconds, 10); - else if (typeof object.seconds === "number") - message.seconds = object.seconds; - else if (typeof object.seconds === "object") - message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); - if (object.nanos != null) - message.nanos = object.nanos | 0; - return message; - }; - - /** - * Creates a plain object from a Timestamp message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.protobuf.Timestamp - * @static - * @param {google.protobuf.Timestamp} message Timestamp - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - Timestamp.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.seconds = options.longs === String ? "0" : 0; - object.nanos = 0; - } - if (message.seconds != null && message.hasOwnProperty("seconds")) - if (typeof message.seconds === "number") - object.seconds = options.longs === String ? String(message.seconds) : message.seconds; - else - object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; - if (message.nanos != null && message.hasOwnProperty("nanos")) - object.nanos = message.nanos; - return object; - }; - - /** - * Converts this Timestamp to JSON. + * Converts this Duration to JSON. 
* @function toJSON - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @instance * @returns {Object.} JSON object */ - Timestamp.prototype.toJSON = function toJSON() { + Duration.prototype.toJSON = function toJSON() { return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; /** - * Gets the default type url for Timestamp + * Gets the default type url for Duration * @function getTypeUrl - * @memberof google.protobuf.Timestamp + * @memberof google.protobuf.Duration * @static * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") * @returns {string} The default type url */ - Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + Duration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { if (typeUrlPrefix === undefined) { typeUrlPrefix = "type.googleapis.com"; } - return typeUrlPrefix + "/google.protobuf.Timestamp"; + return typeUrlPrefix + "/google.protobuf.Duration"; }; - return Timestamp; + return Duration; })(); protobuf.DoubleValue = (function() { @@ -43489,6 +43416,249 @@ return BytesValue; })(); + protobuf.Timestamp = (function() { + + /** + * Properties of a Timestamp. + * @memberof google.protobuf + * @interface ITimestamp + * @property {number|Long|null} [seconds] Timestamp seconds + * @property {number|null} [nanos] Timestamp nanos + */ + + /** + * Constructs a new Timestamp. + * @memberof google.protobuf + * @classdesc Represents a Timestamp. + * @implements ITimestamp + * @constructor + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + */ + function Timestamp(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Timestamp seconds. + * @member {number|Long} seconds + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.seconds = $util.Long ? 
$util.Long.fromBits(0,0,false) : 0; + + /** + * Timestamp nanos. + * @member {number} nanos + * @memberof google.protobuf.Timestamp + * @instance + */ + Timestamp.prototype.nanos = 0; + + /** + * Creates a new Timestamp instance using the specified properties. + * @function create + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp=} [properties] Properties to set + * @returns {google.protobuf.Timestamp} Timestamp instance + */ + Timestamp.create = function create(properties) { + return new Timestamp(properties); + }; + + /** + * Encodes the specified Timestamp message. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. + * @function encode + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) + writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); + return writer; + }; + + /** + * Encodes the specified Timestamp message, length delimited. Does not implicitly {@link google.protobuf.Timestamp.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.ITimestamp} message Timestamp message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Timestamp.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.Timestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.seconds = reader.int64(); + break; + } + case 2: { + message.nanos = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Timestamp message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.Timestamp + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.Timestamp} Timestamp + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Timestamp.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Timestamp message. + * @function verify + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Timestamp.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (!$util.isInteger(message.seconds) && !(message.seconds && $util.isInteger(message.seconds.low) && $util.isInteger(message.seconds.high))) + return "seconds: integer|Long expected"; + if (message.nanos != null && message.hasOwnProperty("nanos")) + if (!$util.isInteger(message.nanos)) + return "nanos: integer expected"; + return null; + }; + + /** + * Creates a Timestamp message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.protobuf.Timestamp + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.Timestamp} Timestamp + */ + Timestamp.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.Timestamp) + return object; + var message = new $root.google.protobuf.Timestamp(); + if (object.seconds != null) + if ($util.Long) + (message.seconds = $util.Long.fromValue(object.seconds)).unsigned = false; + else if (typeof object.seconds === "string") + message.seconds = parseInt(object.seconds, 10); + else if (typeof object.seconds === "number") + message.seconds = object.seconds; + else if (typeof object.seconds === "object") + message.seconds = new $util.LongBits(object.seconds.low >>> 0, object.seconds.high >>> 0).toNumber(); + if (object.nanos != null) + message.nanos = object.nanos | 0; + return message; + }; + + /** + * Creates a plain object from a Timestamp message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.Timestamp + * @static + * @param {google.protobuf.Timestamp} message Timestamp + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Timestamp.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.seconds = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.seconds = options.longs === String ? "0" : 0; + object.nanos = 0; + } + if (message.seconds != null && message.hasOwnProperty("seconds")) + if (typeof message.seconds === "number") + object.seconds = options.longs === String ? String(message.seconds) : message.seconds; + else + object.seconds = options.longs === String ? $util.Long.prototype.toString.call(message.seconds) : options.longs === Number ? 
new $util.LongBits(message.seconds.low >>> 0, message.seconds.high >>> 0).toNumber() : message.seconds; + if (message.nanos != null && message.hasOwnProperty("nanos")) + object.nanos = message.nanos; + return object; + }; + + /** + * Converts this Timestamp to JSON. + * @function toJSON + * @memberof google.protobuf.Timestamp + * @instance + * @returns {Object.} JSON object + */ + Timestamp.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Timestamp + * @function getTypeUrl + * @memberof google.protobuf.Timestamp + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Timestamp.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.Timestamp"; + }; + + return Timestamp; + })(); + protobuf.Any = (function() { /** diff --git a/handwritten/bigquery-storage/protos/protos.json b/handwritten/bigquery-storage/protos/protos.json index 450b5bef965..3cf9dc5a2ae 100644 --- a/handwritten/bigquery-storage/protos/protos.json +++ b/handwritten/bigquery-storage/protos/protos.json @@ -61,6 +61,10 @@ "bufferCompression": { "type": "CompressionCodec", "id": 2 + }, + "picosTimestampPrecision": { + "type": "PicosTimestampPrecision", + "id": 3 } }, "nested": { @@ -70,6 +74,14 @@ "LZ4_FRAME": 1, "ZSTD": 2 } + }, + "PicosTimestampPrecision": { + "values": { + "PICOS_TIMESTAMP_PRECISION_UNSPECIFIED": 0, + "TIMESTAMP_PRECISION_MICROS": 1, + "TIMESTAMP_PRECISION_NANOS": 2, + "TIMESTAMP_PRECISION_PICOS": 3 + } } } }, @@ -101,6 +113,20 @@ "enableDisplayNameAttribute": { "type": "bool", "id": 1 + }, + "picosTimestampPrecision": { + "type": "PicosTimestampPrecision", + "id": 2 + } + }, + "nested": { + "PicosTimestampPrecision": { + "values": { + 
"PICOS_TIMESTAMP_PRECISION_UNSPECIFIED": 0, + "TIMESTAMP_PRECISION_MICROS": 1, + "TIMESTAMP_PRECISION_NANOS": 2, + "TIMESTAMP_PRECISION_PICOS": 3 + } } } }, @@ -1042,7 +1068,7 @@ "type": "string", "id": 8, "options": { - "(google.api.field_behavior)": "IMMUTABLE" + "(google.api.field_behavior)": "OUTPUT_ONLY" } } }, @@ -1138,6 +1164,13 @@ "(google.api.field_behavior)": "OPTIONAL" } }, + "timestampPrecision": { + "type": "google.protobuf.Int64Value", + "id": 27, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, "rangeElementType": { "type": "FieldElementType", "id": 11, @@ -4162,18 +4195,6 @@ } } }, - "Timestamp": { - "fields": { - "seconds": { - "type": "int64", - "id": 1 - }, - "nanos": { - "type": "int32", - "id": 2 - } - } - }, "DoubleValue": { "fields": { "value": { @@ -4246,6 +4267,18 @@ } } }, + "Timestamp": { + "fields": { + "seconds": { + "type": "int64", + "id": 1 + }, + "nanos": { + "type": "int32", + "id": 2 + } + } + }, "Any": { "fields": { "type_url": { diff --git a/handwritten/bigquery-storage/src/adapt/proto.ts b/handwritten/bigquery-storage/src/adapt/proto.ts index d4c2cfbcb0b..c0636e1554c 100644 --- a/handwritten/bigquery-storage/src/adapt/proto.ts +++ b/handwritten/bigquery-storage/src/adapt/proto.ts @@ -345,7 +345,13 @@ function convertTableFieldSchemaToFieldDescriptorProto( label: label, }); } else { - const pType = bqTypeToFieldTypeMap[type]; + let pType = bqTypeToFieldTypeMap[type]; + if ( + type === TableFieldSchema.Type.TIMESTAMP && + Number(field?.timestampPrecision?.value) === 12 + ) { + pType = FieldDescriptorProto.Type.TYPE_STRING; + } if (pType === null) { throw Error(`table field type ${type} not supported`); } diff --git a/handwritten/bigquery-storage/src/adapt/schema.ts b/handwritten/bigquery-storage/src/adapt/schema.ts index c0c0f3de914..bd2a7d989bc 100644 --- a/handwritten/bigquery-storage/src/adapt/schema.ts +++ b/handwritten/bigquery-storage/src/adapt/schema.ts @@ -47,6 +47,11 @@ type ITableFieldSchema = { */ 
type?: string; + /** + * [Optional] The precision for TIMESTAMP fields. 6 for microsecond, 12 for picosecond. + */ + timestampPrecision?: number | string; + /** * Represents the type of a field element. */ @@ -101,6 +106,12 @@ function bqFieldToStorageField(field: ITableFieldSchema): StorageTableField { out.description = field.description; } + if (field.timestampPrecision) { + out.timestampPrecision = { + value: field.timestampPrecision, + }; + } + if (!field.type) { throw Error( `could not convert field (${field.name}) due to unknown type value: ${field.type}`, diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index fbe7d1d75f1..f1442e786c1 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -694,6 +694,136 @@ describe('managedwriter.WriterClient', () => { } }); + it('should invoke appendRows with picosecond precision timestamp without errors', async () => { + const picosTableId = generateUuid(); + const picosSchema: any = { + fields: [ + { + name: 'customer_name', + type: 'STRING', + mode: 'REQUIRED', + }, + { + name: 'row_num', + type: 'INTEGER', + mode: 'REQUIRED', + }, + { + name: 'created_at', + type: 'TIMESTAMP', + mode: 'NULLABLE', + timestampPrecision: 12, + }, + ], + }; + const [table] = await bigquery + .dataset(datasetId) + .createTable(picosTableId, {schema: picosSchema}); + const picosParent = `projects/${projectId}/datasets/${datasetId}/tables/${table.id}`; + + bqWriteClient.initialize().catch(err => { + throw err; + }); + const streamType: WriteStream['type'] = managedwriter.PendingStream; + const client = new WriterClient(); + client.setClient(bqWriteClient); + + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(picosSchema); + const protoDescriptor: DescriptorProto = + 
adapt.convertStorageSchemaToProto2Descriptor(storageSchema, 'root'); + + // Row 1 + const expectedTsValue = '2023-01-01T12:00:00.123456789123Z'; + const row1 = { + customer_name: 'Ada Lovelace', + row_num: 1, + created_at: expectedTsValue, + }; + + const offset: IInt64Value['value'] = '0'; + + const streamId = await client.createWriteStream({ + streamType, + destinationTable: picosParent, + }); + const appendRowsResponsesResult: AppendRowsResponse[] = [ + { + appendResult: { + offset: { + value: offset, + }, + }, + writeStream: streamId, + }, + ]; + try { + const connection = await client.createStreamConnection({ + streamId, + }); + const writer = new JSONWriter({ + connection, + protoDescriptor, + }); + const pw = writer.appendRows([row1], offset); + const result = await pw.getResult(); + const responses: AppendRowsResponse[] = [ + { + appendResult: result.appendResult, + writeStream: result.writeStream, + }, + ]; + + assert.deepEqual(appendRowsResponsesResult, responses); + + const res = await connection.finalize(); + connection.close(); + assert.equal(res?.rowCount, 1); + + const commitResponse = await client.batchCommitWriteStream({ + parent: picosParent, + writeStreams: [streamId], + }); + assert.equal(commitResponse.streamErrors?.length, 0); + + writer.close(); + + // Now read to make sure the written data is correct: + const options: {[key: string]: any} = {}; + const timestampOutputFormat = 'ISO8601_STRING'; + const useInt64Timestamp = false; + + options['formatOptions.timestampOutputFormat'] = timestampOutputFormat; + options['formatOptions.useInt64Timestamp'] = useInt64Timestamp; + + // TODO: When the latest version of Bigquery is released supporting high + // precision reads then we should use that instead of request here. 
+ await new Promise((resolve, reject) => { + (table as any).request( + { + uri: '/data', + qs: options, + }, + (err: any, resp: any) => { + if (err) { + reject(err); + return; + } + try { + assert(resp.rows && resp.rows.length > 0); + assert.strictEqual(resp.rows[0].f[2].v, expectedTsValue); + resolve(); + } catch (e) { + reject(e); + } + }, + ); + }); + } finally { + client.close(); + } + }); + it('should update proto descriptor automatically with appendRows without errors', async () => { bqWriteClient.initialize().catch(err => { throw err; diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index 584b7f8c513..c1bf2fe8eaa 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -445,6 +445,47 @@ describe('Adapt Protos', () => { const decoded = TestProto.decode(serialized).toJSON(); assert.deepEqual(raw, decoded); }); + + it('timestamp precision', () => { + const schema = { + fields: [ + { + name: 'ts', + type: 'TIMESTAMP', + timestampPrecision: 12, + }, + ], + }; + const storageSchema = + adapt.convertBigQuerySchemaToStorageTableSchema(schema); + const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( + storageSchema, + 'Test', + ); + assert.notEqual(protoDescriptor, null); + assert.deepStrictEqual(JSON.parse(JSON.stringify(protoDescriptor)), { + name: 'Test', + field: [ + { + name: 'ts', + number: 1, + label: 'LABEL_OPTIONAL', + type: 'TYPE_STRING', + options: {}, + }, + ], + }); + if (!protoDescriptor) { + throw Error('null proto descriptor set'); + } + const TestProto = Type.fromDescriptor(protoDescriptor); + const raw = { + ts: '2024-04-05T15:45:58.981123456789Z', + }; + const serialized = TestProto.encode(raw).finish(); + const decoded = TestProto.decode(serialized).toJSON(); + assert.deepEqual(raw, decoded); + }); }); describe('Proto descriptor normalization', () => { From ce5b3a789358c64fd6349b8cacf6ca52deceaa91 Mon Sep 
17 00:00:00 2001 From: Gautam Sharda Date: Wed, 25 Feb 2026 21:48:03 +0000 Subject: [PATCH 329/333] build: add release-please config, fix owlbot-config --- .github/CODEOWNERS | 1 + .release-please-manifest.json | 5 +- .../{.github => }/.OwlBot.yaml | 6 +- .../.github/.OwlBot.lock.yaml | 17 --- .../bigquery-storage/.github/CODEOWNERS | 9 -- .../.github/ISSUE_TEMPLATE/bug_report.yml | 99 ---------------- .../.github/ISSUE_TEMPLATE/config.yml | 4 - .../ISSUE_TEMPLATE/documentation_request.yml | 53 --------- .../ISSUE_TEMPLATE/feature_request.yml | 53 --------- .../ISSUE_TEMPLATE/processs_request.md | 4 - .../.github/ISSUE_TEMPLATE/questions.md | 8 -- .../.github/ISSUE_TEMPLATE/support_request.md | 7 -- .../.github/PULL_REQUEST_TEMPLATE.md | 30 ----- .../bigquery-storage/.github/auto-approve.yml | 2 - .../bigquery-storage/.github/auto-label.yaml | 2 - .../.github/generated-files-bot.yml | 16 --- .../.github/release-please.yml | 2 - .../.github/release-trigger.yml | 2 - .../.github/scripts/close-invalid-link.cjs | 73 ------------ .../.github/scripts/close-unresponsive.cjs | 69 ----------- .../scripts/fixtures/invalidIssueBody.txt | 50 -------- .../scripts/fixtures/validIssueBody.txt | 50 -------- .../validIssueBodyDifferentLinkLocation.txt | 50 -------- .../.github/scripts/package.json | 21 ---- .../.github/scripts/remove-response-label.cjs | 33 ------ .../scripts/tests/close-invalid-link.test.cjs | 86 -------------- .../close-or-remove-response-label.test.cjs | 109 ------------------ .../.github/sync-repo-settings.yaml | 23 ---- .../bigquery-storage/.kokoro/common.cfg | 4 +- .../.kokoro/continuous/node18/common.cfg | 4 +- .../.kokoro/continuous/node18/lint.cfg | 2 +- .../continuous/node18/samples-test.cfg | 2 +- .../.kokoro/continuous/node18/system-test.cfg | 2 +- .../.kokoro/presubmit/node18/common.cfg | 4 +- .../.kokoro/presubmit/node18/samples-test.cfg | 2 +- .../.kokoro/presubmit/node18/system-test.cfg | 2 +- .../.kokoro/presubmit/windows/test.cfg | 2 +- 
.../.kokoro/release/docs-devsite.cfg | 4 +- .../bigquery-storage/.kokoro/release/docs.cfg | 4 +- .../.kokoro/release/publish.cfg | 4 +- .../bigquery-storage/.kokoro/trampoline_v2.sh | 24 +++- .../bigquery-storage/.repo-metadata.json | 2 +- handwritten/bigquery-storage/.trampolinerc | 2 +- handwritten/bigquery-storage/owlbot.py | 4 +- handwritten/bigquery-storage/package.json | 10 +- release-please-submodules.json | 15 +-- 46 files changed, 65 insertions(+), 912 deletions(-) rename handwritten/bigquery-storage/{.github => }/.OwlBot.yaml (81%) delete mode 100644 handwritten/bigquery-storage/.github/.OwlBot.lock.yaml delete mode 100644 handwritten/bigquery-storage/.github/CODEOWNERS delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md delete mode 100644 handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 handwritten/bigquery-storage/.github/auto-approve.yml delete mode 100644 handwritten/bigquery-storage/.github/auto-label.yaml delete mode 100644 handwritten/bigquery-storage/.github/generated-files-bot.yml delete mode 100644 handwritten/bigquery-storage/.github/release-please.yml delete mode 100644 handwritten/bigquery-storage/.github/release-trigger.yml delete mode 100644 handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs delete mode 100644 handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs delete mode 100644 
handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt delete mode 100644 handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt delete mode 100644 handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt delete mode 100644 handwritten/bigquery-storage/.github/scripts/package.json delete mode 100644 handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs delete mode 100644 handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs delete mode 100644 handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs delete mode 100644 handwritten/bigquery-storage/.github/sync-repo-settings.yaml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ae7db338588..81841a7b74d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -9,3 +9,4 @@ * @googleapis/cloud-sdk-nodejs-team /handwritten/bigquery @googleapis/bigquery-team /handwritten/cloud-profiler @googleapis/cloud-profiler-team +/handwritten/bigquery-storage @googleapis/bigquery-team diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9afa1ee8000..a8161f48b41 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,9 +1,10 @@ { - "handwritten/datastore": "10.1.0", - "handwritten/logging-winston": "6.0.1", "handwritten/bigquery": "8.2.0", + "handwritten/bigquery-storage": "5.1.0", "handwritten/cloud-profiler": "6.0.4", + "handwritten/datastore": "10.1.0", "handwritten/logging-bunyan": "5.1.1", + "handwritten/logging-winston": "6.0.1", "packages/gapic-node-processing": "0.1.6", "packages/google-ads-admanager": "0.5.0", "packages/google-ads-datamanager": "0.1.0", diff --git a/handwritten/bigquery-storage/.github/.OwlBot.yaml b/handwritten/bigquery-storage/.OwlBot.yaml similarity index 81% rename from handwritten/bigquery-storage/.github/.OwlBot.yaml rename to handwritten/bigquery-storage/.OwlBot.yaml index 
2d27e09de99..e35546ed08d 100644 --- a/handwritten/bigquery-storage/.github/.OwlBot.yaml +++ b/handwritten/bigquery-storage/.OwlBot.yaml @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest deep-preserve-regex: - /owl-bot-staging/v1alpha2 @@ -23,8 +21,8 @@ deep-remove-regex: - /owl-bot-staging deep-copy-regex: - - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs/(.*) - dest: /owl-bot-staging/$1/$2 + - source: /google/cloud/bigquery/storage/(v.*)/.*-nodejs + dest: /owl-bot-staging/bigquery-storage/$1 begin-after-commit-hash: e0ea8b51f30e2ff6104abd1e4c8d1eb67078c86a diff --git a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml b/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml deleted file mode 100644 index 3037bc547d9..00000000000 --- a/handwritten/bigquery-storage/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:b612d739b0533e56ba174526ca339f264b63e911c30d6f83f55b57c38cc6ad2a -# created: 2025-08-15T12:36:48.871481111Z diff --git a/handwritten/bigquery-storage/.github/CODEOWNERS b/handwritten/bigquery-storage/.github/CODEOWNERS deleted file mode 100644 index 4fabf4416d6..00000000000 --- a/handwritten/bigquery-storage/.github/CODEOWNERS +++ /dev/null @@ -1,9 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax - - -# Unless specified, the jsteam is the default owner for nodejs repositories. -* @googleapis/bigquery-team @googleapis/cloud-sdk-nodejs-team @googleapis/jsteam \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index c5b86023a0f..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Bug Report -description: Create a report to help us improve -labels: - - bug -body: - - type: markdown - attributes: - value: > - **PLEASE READ**: If you have a support contract with Google, please - create an issue in the [support - console](https://cloud.google.com/support/) instead of filing on GitHub. - This will ensure a timely response. Otherwise, please make sure to - follow the steps below. - - type: checkboxes - attributes: - label: Please make sure you have searched for information in the following - guides. 
- options: - - label: "Search the issues already opened: - https://github.com/GoogleCloudPlatform/google-cloud-node/issues" - required: true - - label: "Search StackOverflow: - http://stackoverflow.com/questions/tagged/google-cloud-platform+nod\ - e.js" - required: true - - label: "Check our Troubleshooting guide: - https://github.com/googleapis/google-cloud-node/blob/main/docs/trou\ - bleshooting.md" - required: true - - label: "Check our FAQ: - https://github.com/googleapis/google-cloud-node/blob/main/docs/faq.\ - md" - required: true - - label: "Check our libraries HOW-TO: - https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ - .md" - required: true - - label: "Check out our authentication guide: - https://github.com/googleapis/google-auth-library-nodejs" - required: true - - label: "Check out handwritten samples for many of our APIs: - https://github.com/GoogleCloudPlatform/nodejs-docs-samples" - required: true - - type: textarea - attributes: - label: > - A screenshot that you have tested with "Try this API". - description: > - As our client libraries are mostly autogenerated, we kindly request - that you test whether your issue is with the client library, or with the - API itself. To do so, please search for your API - here: https://developers.google.com/apis-explorer and attempt to - reproduce the issue in the given method. Please include a screenshot of - the response in "Try this API". This response should NOT match the current - behavior you are experiencing. If the behavior is the same, it means - that you are likely experiencing a bug with the API itself. 
In that - case, please submit an issue to the API team, either by submitting an - issue in its issue tracker (https://cloud.google.com/support/docs/issue-trackers), or by - submitting an issue in its linked tracker in the .repo-metadata.json - file https://b.corp.google.com/savedsearches/559654 - validations: - required: true - - type: input - attributes: - label: > - Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal - reproduction. - description: > - **Skipping this or providing an invalid link will result in the issue being closed** - validations: - required: true - - type: textarea - attributes: - label: > - A step-by-step description of how to reproduce the issue, based on - the linked reproduction. - description: > - Screenshots can be provided in the issue body below. - placeholder: | - 1. Start the application in development (next dev) - 2. Click X - 3. Y will happen - validations: - required: true - - type: textarea - attributes: - label: A clear and concise description of what the bug is, and what you - expected to happen. - placeholder: Following the steps from the previous section, I expected A to - happen, but I observed B instead - validations: - required: true - - - type: textarea - attributes: - label: A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. 
** - placeholder: 'Documentation here(link) states that B should happen instead of A' - validations: - required: true diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index 603b90133b6..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,4 +0,0 @@ -contact_links: - - name: Google Cloud Support - url: https://cloud.google.com/support/ - about: If you have a support contract with Google, please use the Google Cloud Support portal. diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml deleted file mode 100644 index e78086c752b..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/documentation_request.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Documentation Requests -description: Requests for more information -body: - - type: markdown - attributes: - value: > - Please use this issue type to log documentation requests against the library itself. - These requests should involve documentation on Github (`.md` files), and should relate to the library - itself. If you have questions or documentation requests for an API, please - reach out to the API tracker itself. - - Please submit an issue to the API team, either by submitting an - issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers), or by - submitting an issue in its linked tracker in the .repo-metadata.json - file in the API under packages/* ([example](https://b.corp.google.com/savedsearches/559654)). - You can also submit a request to documentation on cloud.google.com itself with the "Send Feedback" - on the bottom of the page. - - - Please note that documentation requests and questions for specific APIs - will be closed. 
- - type: checkboxes - attributes: - label: Please make sure you have searched for information in the following - guides. - options: - - label: "Search the issues already opened: - https://github.com/GoogleCloudPlatform/google-cloud-node/issues" - required: true - - label: "Check our Troubleshooting guide: - https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ - es/troubleshooting" - required: true - - label: "Check our FAQ: - https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ - es/faq" - required: true - - label: "Check our libraries HOW-TO: - https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ - .md" - required: true - - label: "Check out our authentication guide: - https://github.com/googleapis/google-auth-library-nodejs" - required: true - - label: "Check out handwritten samples for many of our APIs: - https://github.com/GoogleCloudPlatform/nodejs-docs-samples" - required: true - - type: textarea - attributes: - label: > - Documentation Request - validations: - required: true diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index 344778ae1f2..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Feature Request -description: Suggest an idea for this library -labels: - - feature request -body: - - type: markdown - attributes: - value: > - **PLEASE READ**: If you have a support contract with Google, please - create an issue in the [support - console](https://cloud.google.com/support/) instead of filing on GitHub. - This will ensure a timely response. Otherwise, please make sure to - follow the steps below. - - type: textarea - attributes: - label: > - A screenshot that you have tested with "Try this API". 
- description: > - As our client libraries are mostly autogenerated, we kindly request - that you test whether your feature request is with the client library, or with the - API itself. To do so, please search for your API - here: https://developers.google.com/apis-explorer and attempt to - reproduce the issue in the given method. Please include a screenshot of - the response in "Try this API". This response should NOT match the current - behavior you are experiencing. If the behavior is the same, it means - that you are likely requesting a feature for the API itself. In that - case, please submit an issue to the API team, either by submitting an - issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers, or by - submitting an issue in its linked tracker in the .repo-metadata.json - file in the API under packages/* ([example](https://b.corp.google.com/savedsearches/559654)) - - Example of library specific issues would be: retry strategies, authentication questions, or issues with typings. - Examples of API issues would include: expanding method parameter types, adding functionality to an API. - validations: - required: true - - type: textarea - attributes: - label: > - What would you like to see in the library? - description: > - Screenshots can be provided in the issue body below. - placeholder: | - 1. Set up authentication like so - 2. Run the program like so - 3. 
X would be nice to happen - - - type: textarea - attributes: - label: Describe alternatives you've considered - - - type: textarea - attributes: - label: Additional context/notes \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md deleted file mode 100644 index 45682e8f117..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/processs_request.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -name: Process Request -about: Submit a process request to the library. Process requests are any requests related to library infrastructure, for example CI/CD, publishing, releasing, broken links. ---- diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md deleted file mode 100644 index 62c1dd1b93a..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/questions.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: Question -about: If you have a question, please use Discussions - ---- - -If you have a general question that goes beyond the library itself, we encourage you to use [Discussions](https://github.com//discussions) -to engage with fellow community members! diff --git a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md b/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 99586903212..00000000000 --- a/handwritten/bigquery-storage/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md b/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 15ce116d1fe..00000000000 --- a/handwritten/bigquery-storage/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,30 +0,0 @@ -> Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - -## Description - -> Please provide a detailed description for the change. -> As much as possible, please try to keep changes separate by purpose. For example, try not to make a one-line bug fix in a feature request, or add an irrelevant README change to a bug fix. - -## Impact - -> What's the impact of this change? - -## Testing - -> Have you added unit and integration tests if necessary? -> Were any tests changed? Are any breaking changes necessary? - -## Additional Information - -> Any additional details that we should be aware of? - -## Checklist - -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery-storage/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease -- [ ] Appropriate docs were updated -- [ ] Appropriate comments were added, particularly in complex areas or places that require background -- [ ] No new warnings or issues will be generated from this change - -Fixes #issue_number_goes_here 🦕 diff --git a/handwritten/bigquery-storage/.github/auto-approve.yml b/handwritten/bigquery-storage/.github/auto-approve.yml deleted file mode 100644 index 7cba0af636c..00000000000 --- a/handwritten/bigquery-storage/.github/auto-approve.yml +++ /dev/null @@ -1,2 +0,0 @@ -processes: - - "NodeDependency" \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/auto-label.yaml b/handwritten/bigquery-storage/.github/auto-label.yaml deleted file mode 100644 index 09c8d735b45..00000000000 --- a/handwritten/bigquery-storage/.github/auto-label.yaml +++ /dev/null @@ -1,2 +0,0 @@ -requestsize: - enabled: true diff --git a/handwritten/bigquery-storage/.github/generated-files-bot.yml b/handwritten/bigquery-storage/.github/generated-files-bot.yml deleted file mode 100644 index 992ccef4a13..00000000000 --- a/handwritten/bigquery-storage/.github/generated-files-bot.yml +++ /dev/null @@ -1,16 +0,0 @@ -generatedFiles: -- path: '.kokoro/**' - message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: '.github/CODEOWNERS' - message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' -- path: '.github/workflows/ci.yaml' - message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: '.github/generated-files-bot.+(yml|yaml)' - message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' -- path: 'README.md' - 
message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' -- path: 'samples/README.md' - message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' -ignoreAuthors: -- 'gcf-owl-bot[bot]' -- 'yoshi-automation' diff --git a/handwritten/bigquery-storage/.github/release-please.yml b/handwritten/bigquery-storage/.github/release-please.yml deleted file mode 100644 index a1b41da3cb3..00000000000 --- a/handwritten/bigquery-storage/.github/release-please.yml +++ /dev/null @@ -1,2 +0,0 @@ -handleGHRelease: true -releaseType: node diff --git a/handwritten/bigquery-storage/.github/release-trigger.yml b/handwritten/bigquery-storage/.github/release-trigger.yml deleted file mode 100644 index 521eddda376..00000000000 --- a/handwritten/bigquery-storage/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: nodejs-bigquery-storage \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs b/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs deleted file mode 100644 index fdb51488197..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/close-invalid-link.cjs +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const fs = require('fs'); -const yaml = require('js-yaml'); -const path = require('path'); -const TEMPLATE_FILE_PATH = path.resolve(__dirname, '../ISSUE_TEMPLATE/bug_report.yml') - -async function closeIssue(github, owner, repo, number) { - await github.rest.issues.createComment({ - owner: owner, - repo: repo, - issue_number: number, - body: "Issue was opened with an invalid reproduction link. Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)" - }); - await github.rest.issues.update({ - owner: owner, - repo: repo, - issue_number: number, - state: "closed" - }); -} -module.exports = async ({ github, context }) => { - const owner = context.repo.owner; - const repo = context.repo.repo; - const number = context.issue.number; - - const issue = await github.rest.issues.get({ - owner: owner, - repo: repo, - issue_number: number, - }); - - const yamlData = fs.readFileSync(TEMPLATE_FILE_PATH, 'utf8'); - const obj = yaml.load(yamlData); - const linkMatchingText = (obj.body.find(x => {return x.type === 'input' && x.validations.required === true && x.attributes.label.includes('link')})).attributes.label; - const isBugTemplate = issue.data.body.includes(linkMatchingText); - - if (isBugTemplate) { - console.log(`Issue ${number} is a bug template`) - try { - const text = issue.data.body; - const match = text.indexOf(linkMatchingText); - if (match !== -1) { - const nextLineIndex = text.indexOf('http', match); - if (nextLineIndex == -1) { 
- await closeIssue(github, owner, repo, number); - return; - } - const link = text.substring(nextLineIndex, text.indexOf('\n', nextLineIndex)); - console.log(`Issue ${number} contains this link: ${link}`); - const isValidLink = (await fetch(link)).ok; - console.log(`Issue ${number} has a ${isValidLink ? "valid" : "invalid"} link`) - if (!isValidLink) { - await closeIssue(github, owner, repo, number); - } - } - } catch (err) { - await closeIssue(github, owner, repo, number); - } - } -}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs b/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs deleted file mode 100644 index 6f81b508fa5..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/close-unresponsive.cjs +++ /dev/null @@ -1,69 +0,0 @@ -/// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -function labeledEvent(data) { - return data.event === "labeled" && data.label.name === "needs more info"; -} - -const numberOfDaysLimit = 15; -const close_message = `This has been closed since a request for information has \ -not been answered for ${numberOfDaysLimit} days. 
It can be reopened when the \ -requested information is provided.`; - -module.exports = async ({ github, context }) => { - const owner = context.repo.owner; - const repo = context.repo.repo; - - const issues = await github.rest.issues.listForRepo({ - owner: owner, - repo: repo, - labels: "needs more info", - }); - const numbers = issues.data.map((e) => e.number); - - for (const number of numbers) { - const events = await github.paginate( - github.rest.issues.listEventsForTimeline, - { - owner: owner, - repo: repo, - issue_number: number, - }, - (response) => response.data.filter(labeledEvent) - ); - - const latest_response_label = events[events.length - 1]; - - const created_at = new Date(latest_response_label.created_at); - const now = new Date(); - const diff = now - created_at; - const diffDays = diff / (1000 * 60 * 60 * 24); - - if (diffDays > numberOfDaysLimit) { - await github.rest.issues.update({ - owner: owner, - repo: repo, - issue_number: number, - state: "closed", - }); - - await github.rest.issues.createComment({ - owner: owner, - repo: repo, - issue_number: number, - body: close_message, - }); - } - } -}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt deleted file mode 100644 index 504bd669022..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/fixtures/invalidIssueBody.txt +++ /dev/null @@ -1,50 +0,0 @@ -### Please make sure you have searched for information in the following guides. 
- -- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues -- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js -- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting -- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq -- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md -- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs -- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples - -### A screenshot that you have tested with "Try this API". - - -N/A - -### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. - -not-a-link - -### A step-by-step description of how to reproduce the issue, based on the linked reproduction. - - -Change MY_PROJECT to your project name, add credentials if needed and run. - -### A clear and concise description of what the bug is, and what you expected to happen. - -The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. -TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object - at _write (node:internal/streams/writable:474:13) - at Writable.write (node:internal/streams/writable:502:10) - at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) - at Object.onceWrapper (node:events:633:26) - at Pumpify.emit (node:events:518:28) - at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at PassThrough.ondata (node:internal/streams/readable:1007:22) - at PassThrough.emit (node:events:518:28) - at addChunk (node:internal/streams/readable:559:12) { - code: 'ERR_INVALID_ARG_TYPE' - - -### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** - -No library should crash an application this way. \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt deleted file mode 100644 index 6e0ace338eb..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBody.txt +++ /dev/null @@ -1,50 +0,0 @@ -### Please make sure you have searched for information in the following guides. 
- -- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues -- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js -- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting -- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq -- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md -- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs -- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples - -### A screenshot that you have tested with "Try this API". - - -N/A - -### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. - -https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 - -### A step-by-step description of how to reproduce the issue, based on the linked reproduction. - - -Change MY_PROJECT to your project name, add credentials if needed and run. - -### A clear and concise description of what the bug is, and what you expected to happen. - -The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. -TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object - at _write (node:internal/streams/writable:474:13) - at Writable.write (node:internal/streams/writable:502:10) - at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) - at Object.onceWrapper (node:events:633:26) - at Pumpify.emit (node:events:518:28) - at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at PassThrough.ondata (node:internal/streams/readable:1007:22) - at PassThrough.emit (node:events:518:28) - at addChunk (node:internal/streams/readable:559:12) { - code: 'ERR_INVALID_ARG_TYPE' - - -### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** - -No library should crash an application this way. 
\ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt b/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt deleted file mode 100644 index 984a420e376..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt +++ /dev/null @@ -1,50 +0,0 @@ -### Please make sure you have searched for information in the following guides. - -- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues -- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js -- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting -- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq -- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md -- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs -- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples - -### A screenshot that you have tested with "Try this API". - - -N/A - -### A step-by-step description of how to reproduce the issue, based on the linked reproduction. - - -Change MY_PROJECT to your project name, add credentials if needed and run. - -### A clear and concise description of what the bug is, and what you expected to happen. - -The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. -TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object - at _write (node:internal/streams/writable:474:13) - at Writable.write (node:internal/streams/writable:502:10) - at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) - at Object.onceWrapper (node:events:633:26) - at Pumpify.emit (node:events:518:28) - at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) - at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) - at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) - at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) - at PassThrough.ondata (node:internal/streams/readable:1007:22) - at PassThrough.emit (node:events:518:28) - at addChunk (node:internal/streams/readable:559:12) { - code: 'ERR_INVALID_ARG_TYPE' - -### Link to the code that reproduces this issue. A link to a **public** Github Repository with a minimal reproduction. - - -https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 - -### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** - -No library should crash an application this way. 
\ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/package.json b/handwritten/bigquery-storage/.github/scripts/package.json deleted file mode 100644 index 84b201023bf..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "tests", - "private": true, - "description": "tests for script", - "scripts": { - "test": "mocha tests/close-invalid-link.test.cjs && mocha tests/close-or-remove-response-label.test.cjs" - }, - "author": "Google Inc.", - "license": "Apache-2.0", - "engines": { - "node": ">=18" - }, - "dependencies": { - "js-yaml": "^4.1.0" - }, - "devDependencies": { - "@octokit/rest": "^21.0.0", - "mocha": "^10.0.0", - "sinon": "^21.0.0" - } -} \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs b/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs deleted file mode 100644 index 4a784ddf7a5..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/remove-response-label.cjs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -module.exports = async ({ github, context }) => { - const commenter = context.actor; - const issue = await github.rest.issues.get({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - }); - const author = issue.data.user.login; - const labels = issue.data.labels.map((e) => e.name); - - if (author === commenter && labels.includes("needs more info")) { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - name: "needs more info", - }); - } -}; \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs b/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs deleted file mode 100644 index f63ee89c811..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/tests/close-invalid-link.test.cjs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -'use strict'; - -const { describe, it } = require('mocha'); -const closeInvalidLink = require('../close-invalid-link.cjs'); -const fs = require('fs'); -const sinon = require('sinon'); - -describe('close issues with invalid links', () => { - let octokitStub; - let issuesStub; - - beforeEach(() => { - issuesStub = { - get: sinon.stub(), - createComment: sinon.stub(), - update: sinon.stub(), - }; - octokitStub = { - rest: { - issues: issuesStub, - }, - }; - }); - - afterEach(() => { - sinon.restore(); - }); - - it('does not do anything if it is not a bug', async () => { - const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - issuesStub.get.resolves({ data: { body: "I'm having a problem with this." } }); - - await closeInvalidLink({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.notCalled(issuesStub.createComment); - sinon.assert.notCalled(issuesStub.update); - }); - - it('does not do anything if it is a bug with an appropriate link', async () => { - const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBody.txt', 'utf-8') } }); - - await closeInvalidLink({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.notCalled(issuesStub.createComment); - sinon.assert.notCalled(issuesStub.update); - }); - - it('does not do anything if it is a bug with an appropriate link and the template changes', async () => { - const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBodyDifferentLinkLocation.txt', 'utf-8') } }); - - await closeInvalidLink({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.notCalled(issuesStub.createComment); - sinon.assert.notCalled(issuesStub.update); - }); - - 
it('closes the issue if the link is invalid', async () => { - const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/invalidIssueBody.txt', 'utf-8') } }); - - await closeInvalidLink({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.calledOnce(issuesStub.createComment); - sinon.assert.calledOnce(issuesStub.update); - }); -}); \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs b/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs deleted file mode 100644 index fb092c53619..00000000000 --- a/handwritten/bigquery-storage/.github/scripts/tests/close-or-remove-response-label.test.cjs +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -'use strict'; - -const { describe, it, beforeEach, afterEach } = require('mocha'); -const removeResponseLabel = require('../remove-response-label.cjs'); -const closeUnresponsive = require('../close-unresponsive.cjs'); -const sinon = require('sinon'); - -function getISODateDaysAgo(days) { - const today = new Date(); - const daysAgo = new Date(today.setDate(today.getDate() - days)); - return daysAgo.toISOString(); -} - -describe('close issues or remove needs more info labels', () => { - let octokitStub; - let issuesStub; - let paginateStub; - - beforeEach(() => { - issuesStub = { - listForRepo: sinon.stub(), - update: sinon.stub(), - createComment: sinon.stub(), - get: sinon.stub(), - removeLabel: sinon.stub(), - }; - paginateStub = sinon.stub(); - octokitStub = { - rest: { - issues: issuesStub, - }, - paginate: paginateStub, - }; - }); - - afterEach(() => { - sinon.restore(); - }); - - it('closes the issue if the OP has not responded within the allotted time and there is a needs-more-info label', async () => { - const context = { owner: 'testOrg', repo: 'testRepo' }; - const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; - const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(16) }]; - - issuesStub.listForRepo.resolves({ data: issuesInRepo }); - paginateStub.resolves(eventsInIssue); - - await closeUnresponsive({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.listForRepo); - sinon.assert.calledOnce(paginateStub); - sinon.assert.calledOnce(issuesStub.update); - sinon.assert.calledOnce(issuesStub.createComment); - }); - - it('does nothing if not enough time has passed and there is a needs-more-info label', async () => { - const context = { owner: 'testOrg', repo: 'testRepo' }; - const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; - const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, 
created_at: getISODateDaysAgo(14) }]; - - issuesStub.listForRepo.resolves({ data: issuesInRepo }); - paginateStub.resolves(eventsInIssue); - - await closeUnresponsive({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.listForRepo); - sinon.assert.calledOnce(paginateStub); - sinon.assert.notCalled(issuesStub.update); - sinon.assert.notCalled(issuesStub.createComment); - }); - - it('removes the label if OP responded', async () => { - const context = { actor: 'OP', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; - - issuesStub.get.resolves({ data: issueContext }); - - await removeResponseLabel({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.calledOnce(issuesStub.removeLabel); - }); - - it('does not remove the label if author responded', async () => { - const context = { actor: 'repo-maintainer', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; - const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; - - issuesStub.get.resolves({ data: issueContext }); - - await removeResponseLabel({ github: octokitStub, context }); - - sinon.assert.calledOnce(issuesStub.get); - sinon.assert.notCalled(issuesStub.removeLabel); - }); -}); \ No newline at end of file diff --git a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml b/handwritten/bigquery-storage/.github/sync-repo-settings.yaml deleted file mode 100644 index a013376d1cb..00000000000 --- a/handwritten/bigquery-storage/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,23 +0,0 @@ -branchProtectionRules: - - pattern: main - isAdminEnforced: true - requiredApprovingReviewCount: 1 - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true - requiredStatusCheckContexts: - - "ci/kokoro: Samples test" - - "ci/kokoro: System test" - - lint - - test (18) - - test (20) - - test (22) - - 
cla/google - - windows - - OwlBot Post Processor -permissionRules: - - team: yoshi-admins - permission: admin - - team: jsteam-admins - permission: admin - - team: jsteam - permission: push diff --git a/handwritten/bigquery-storage/.kokoro/common.cfg b/handwritten/bigquery-storage/.kokoro/common.cfg index 2339f0880ee..46fa36937bd 100644 --- a/handwritten/bigquery-storage/.kokoro/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -20,5 +20,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/test.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg index 2339f0880ee..46fa36937bd 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node18/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -20,5 +20,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/test.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg index 0a5d546b96b..7b2b57818bf 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node18/lint.cfg @@ -1,4 +1,4 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/lint.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/lint.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg index 9ca77598360..be0b200a792 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node18/samples-test.cfg @@ -3,7 +3,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/samples-test.sh" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg b/handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg index 42454cf416c..25f57eb1376 100644 --- a/handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/continuous/node18/system-test.cfg @@ -3,7 +3,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" + value: 
"github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/system-test.sh" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg index 2339f0880ee..46fa36937bd 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node18/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { @@ -20,5 +20,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/test.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg index 9ca77598360..be0b200a792 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/node18/samples-test.cfg @@ -3,7 +3,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/samples-test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/samples-test.sh" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg index 42454cf416c..25f57eb1376 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg +++ 
b/handwritten/bigquery-storage/.kokoro/presubmit/node18/system-test.cfg @@ -3,7 +3,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/system-test.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/system-test.sh" } env_vars: { diff --git a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg index 83de067d5f1..fcebee370ab 100644 --- a/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg +++ b/handwritten/bigquery-storage/.kokoro/presubmit/windows/test.cfg @@ -1,2 +1,2 @@ # Use the test file directly -build_file: "nodejs-bigquery-storage/.kokoro/test.bat" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/test.bat" diff --git a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg index 8c834f1f2a1..9b7cbea3dad 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs-devsite.cfg @@ -18,9 +18,9 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/release/docs-devsite.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/release/docs-devsite.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/release/docs.cfg b/handwritten/bigquery-storage/.kokoro/release/docs.cfg index 4dc3fc24d2b..d886e7a565c 100644 --- a/handwritten/bigquery-storage/.kokoro/release/docs.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/docs.cfg @@ -18,9 +18,9 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/release/docs.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/release/docs.sh" } diff --git a/handwritten/bigquery-storage/.kokoro/release/publish.cfg b/handwritten/bigquery-storage/.kokoro/release/publish.cfg index 1ab8d3f84b5..8b38abf648b 100644 --- a/handwritten/bigquery-storage/.kokoro/release/publish.cfg +++ b/handwritten/bigquery-storage/.kokoro/release/publish.cfg @@ -25,7 +25,7 @@ env_vars: { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "nodejs-bigquery-storage/.kokoro/trampoline_v2.sh" +build_file: "nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -35,7 +35,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-bigquery-storage/.kokoro/publish.sh" + value: "github/nodejs-bigquery-storage/handwritten/bigquery-storage/.kokoro/publish.sh" } # Store the packages we uploaded to npmjs.org and their corresponding diff --git a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh index 5d6cfcca528..92312d4d09c 100755 --- a/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh +++ b/handwritten/bigquery-storage/.kokoro/trampoline_v2.sh @@ -246,14 +246,34 @@ function repo_root() { if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then PROGRAM_PATH="$(realpath "$0")" PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")/handwritten/bigquery-storage" else - PROJECT_ROOT="$(repo_root $(pwd))" + PROJECT_ROOT="$(repo_root $(pwd))/handwritten/bigquery-storage" fi log_yellow "Changing to the project root: ${PROJECT_ROOT}." cd "${PROJECT_ROOT}" +# Auto-injected conditional check +# Check if the package directory has changes. If not, skip tests. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # The package path is hardcoded during migration + RELATIVE_PKG_PATH="handwritten/bigquery-storage" + + echo "Checking for changes in ${RELATIVE_PKG_PATH}..." + + # Determine the diff range based on the CI system/event + # Safe default: HEAD~1..HEAD + DIFF_RANGE="HEAD~1..HEAD" + + if git diff --quiet "${DIFF_RANGE}" -- "${RELATIVE_PKG_PATH}"; then + echo "No changes detected in ${RELATIVE_PKG_PATH}. Skipping tests." + exit 0 + else + echo "Changes detected in ${RELATIVE_PKG_PATH}. Proceeding with tests." + fi +fi + # To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need # to use this environment variable in `PROJECT_ROOT`. 
if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then diff --git a/handwritten/bigquery-storage/.repo-metadata.json b/handwritten/bigquery-storage/.repo-metadata.json index a85a5e5bc0f..9efcadddc93 100644 --- a/handwritten/bigquery-storage/.repo-metadata.json +++ b/handwritten/bigquery-storage/.repo-metadata.json @@ -2,7 +2,7 @@ "distribution_name": "@google-cloud/bigquery-storage", "release_level": "stable", "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage", - "repo": "googleapis/nodejs-bigquery-storage", + "repo": "googleapis/google-cloud-node", "default_version": "v1", "language": "nodejs", "requires_billing": true, diff --git a/handwritten/bigquery-storage/.trampolinerc b/handwritten/bigquery-storage/.trampolinerc index 5fc2253137d..7a2bd550b02 100644 --- a/handwritten/bigquery-storage/.trampolinerc +++ b/handwritten/bigquery-storage/.trampolinerc @@ -49,4 +49,4 @@ if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then fi # Secret Manager secrets. -source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh +source ${PROJECT_ROOT}/handwritten/bigquery-storage/.kokoro/populate-secrets.sh diff --git a/handwritten/bigquery-storage/owlbot.py b/handwritten/bigquery-storage/owlbot.py index 918849d1a7d..0059e73a001 100644 --- a/handwritten/bigquery-storage/owlbot.py +++ b/handwritten/bigquery-storage/owlbot.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""This script is used to synthesize generated parts of this library.""" -import synthtool.languages.node as node +import synthtool.languages.node_mono_repo as node -node.owlbot_main( +node.owlbot_main(relative_dir="handwritten/bigquery-storage", staging_excludes=['package.json', 'README.md', 'src/index.ts'], templates_excludes=['src/index.ts','.OwlBot.yaml'] ) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 8838d09c56f..3019f375f53 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -2,7 +2,11 @@ "name": "@google-cloud/bigquery-storage", "version": "5.1.0", "description": "Client for the BigQuery Storage API", - "repository": "googleapis/nodejs-bigquery-storage", + "repository": { + "type": "git", + "directory": "handwritten/bigquery-storage", + "url": "https://github.com/googleapis/google-cloud-node.git" + }, "license": "Apache-2.0", "author": "Google LLC", "files": [ @@ -34,7 +38,6 @@ "extend": "^3.0.2", "google-auth-library": "^10.0.0", "google-gax": "^5.0.0" - }, "peerDependencies": { "protobufjs": "^7.2.4 - 7.5.0" @@ -67,5 +70,6 @@ }, "engines": { "node": ">=18" - } + }, + "homepage": "https://github.com/googleapis/google-cloud-node/tree/main/handwritten/bigquery-storage" } diff --git a/release-please-submodules.json b/release-please-submodules.json index 31841c0d0fd..a3f345c3fea 100644 --- a/release-please-submodules.json +++ b/release-please-submodules.json @@ -1,16 +1,11 @@ { - "release-type": "node", - "separate-pull-requests": true, "commit-batch-size": 1, "include-component-in-tag": true, - "tag-separator": "-", "packages": { - "handwritten/logging-winston": { - "component": "logging-winston" - }, "handwritten/bigquery": { "component": "bigquery" }, + "handwritten/bigquery-storage": {}, "handwritten/cloud-profiler": { "component": "cloud-profiler" }, @@ -19,11 +14,17 @@ }, "handwritten/logging-bunyan": { "component": "logging-bunyan" + }, + 
"handwritten/logging-winston": { + "component": "logging-winston" } }, "plugins": [ { "type": "sentence-case" } - ] + ], + "release-type": "node", + "separate-pull-requests": true, + "tag-separator": "-" } From 0bdad349b9297d3ee3648d3adf857b7398eb6fcb Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 25 Feb 2026 22:10:23 +0000 Subject: [PATCH 330/333] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- handwritten/bigquery-storage/.eslintignore | 1 - handwritten/bigquery-storage/.mocharc.js | 2 +- handwritten/bigquery-storage/.prettierrc.js | 2 +- handwritten/bigquery-storage/README.md | 281 ++--------- .../bigquery-storage/samples/README.md | 464 ++++++++++++++++++ 5 files changed, 500 insertions(+), 250 deletions(-) create mode 100644 handwritten/bigquery-storage/samples/README.md diff --git a/handwritten/bigquery-storage/.eslintignore b/handwritten/bigquery-storage/.eslintignore index c4a0963e9bd..ea5b04aebe6 100644 --- a/handwritten/bigquery-storage/.eslintignore +++ b/handwritten/bigquery-storage/.eslintignore @@ -5,4 +5,3 @@ build/ docs/ protos/ samples/generated/ -system-test/**/fixtures diff --git a/handwritten/bigquery-storage/.mocharc.js b/handwritten/bigquery-storage/.mocharc.js index 0b600509bed..2431859019f 100644 --- a/handwritten/bigquery-storage/.mocharc.js +++ b/handwritten/bigquery-storage/.mocharc.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2026 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/handwritten/bigquery-storage/.prettierrc.js b/handwritten/bigquery-storage/.prettierrc.js index d1b95106f4c..d2eddc2ed89 100644 --- a/handwritten/bigquery-storage/.prettierrc.js +++ b/handwritten/bigquery-storage/.prettierrc.js @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2026 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/handwritten/bigquery-storage/README.md b/handwritten/bigquery-storage/README.md index 97782baa3c9..3aeee8fc22c 100644 --- a/handwritten/bigquery-storage/README.md +++ b/handwritten/bigquery-storage/README.md @@ -2,128 +2,23 @@ [//]: # "To regenerate it, use `python -m synthtool`." Google Cloud Platform logo -# [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/nodejs-bigquery-storage) +# [Google BigQuery Storage: Node.js Client](https://github.com/googleapis/google-cloud-node/tree/main/handwritten/bigquery-storage) [![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) -[![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.org/package/@google-cloud/bigquery-storage) - - - - -> Node.js idiomatic client for [BigQuery Storage](https://cloud.google.com/bigquery). - -The BigQuery Storage product is divided into two major APIs: Write and Read API. -BigQuery Storage API does not provide functionality related to managing BigQuery -resources such as datasets, jobs, or tables. - -The BigQuery Storage Write API is a unified data-ingestion API for BigQuery. -It combines streaming ingestion and batch loading into a single high-performance API. -You can use the Storage Write API to stream records into BigQuery in real time or -to batch process an arbitrarily large number of records and commit them in a single -atomic operation. 
- -Read more in our [introduction guide](https://cloud.google.com/bigquery/docs/write-api). - -Using a system provided default stream, this code sample demonstrates using the -schema of a destination stream/table to construct a writer, and send several -batches of row data to the table. - -```javascript -const {adapt, managedwriter} = require('@google-cloud/bigquery-storage'); -const {WriterClient, JSONWriter} = managedwriter; - -async function appendJSONRowsDefaultStream() { - const projectId = 'my_project'; - const datasetId = 'my_dataset'; - const tableId = 'my_table'; - - const destinationTable = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; - const writeClient = new WriterClient({projectId}); - - try { - const writeStream = await writeClient.getWriteStream({ - streamId: `${destinationTable}/streams/_default`, - view: 'FULL' - }); - const protoDescriptor = adapt.convertStorageSchemaToProto2Descriptor( - writeStream.tableSchema, - 'root' - ); - - const connection = await writeClient.createStreamConnection({ - streamId: managedwriter.DefaultStream, - destinationTable, - }); - const streamId = connection.getStreamId(); - - const writer = new JSONWriter({ - streamId, - connection, - protoDescriptor, - }); - - let rows = []; - const pendingWrites = []; - - // Row 1 - let row = { - row_num: 1, - customer_name: 'Octavia', - }; - rows.push(row); - - // Row 2 - row = { - row_num: 2, - customer_name: 'Turing', - }; - rows.push(row); - - // Send batch. - let pw = writer.appendRows(rows); - pendingWrites.push(pw); - - rows = []; - - // Row 3 - row = { - row_num: 3, - customer_name: 'Bell', - }; - rows.push(row); - - // Send batch. 
- pw = writer.appendRows(rows); - pendingWrites.push(pw); - - const results = await Promise.all( - pendingWrites.map(pw => pw.getResult()) - ); - console.log('Write results:', results); - } catch (err) { - console.log(err); - } finally { - writeClient.close(); - } -} -``` +[![npm version](https://img.shields.io/npm/v/@google-cloud/bigquery-storage.svg)](https://www.npmjs.com/package/@google-cloud/bigquery-storage) + -The BigQuery Storage Read API provides fast access to BigQuery-managed storage by -using an gRPC based protocol. When you use the Storage Read API, structured data is -sent over the wire in a binary serialization format. This allows for additional -parallelism among multiple consumers for a set of results. -Read more how to [use the BigQuery Storage Read API](https://cloud.google.com/bigquery/docs/reference/storage). -See sample code on the [Quickstart section](#quickstart). +Client for the BigQuery Storage API A comprehensive list of changes in each version may be found in -[the CHANGELOG](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CHANGELOG.md). +[the CHANGELOG](https://github.com/googleapis/google-cloud-node/tree/main/handwritten/bigquery-storage/CHANGELOG.md). * [Google BigQuery Storage Node.js Client API Reference][client-docs] * [Google BigQuery Storage Documentation][product-docs] -* [github.com/googleapis/nodejs-bigquery-storage](https://github.com/googleapis/nodejs-bigquery-storage) +* [github.com/googleapis/google-cloud-node/handwritten/bigquery-storage](https://github.com/googleapis/google-cloud-node/tree/main/handwritten/bigquery-storage) Read more about the client libraries for Cloud APIs, including the older Google APIs Client Libraries, in [Client Libraries Explained][explained]. @@ -136,7 +31,7 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained]. 
* [Quickstart](#quickstart) * [Before you begin](#before-you-begin) * [Installing the client library](#installing-the-client-library) - * [Using the client library](#using-the-client-library) + * [Samples](#samples) * [Versioning](#versioning) * [Contributing](#contributing) @@ -159,146 +54,38 @@ npm install @google-cloud/bigquery-storage ``` -### Using the client library - -```javascript - -// The read stream contains blocks of Avro-encoded bytes. We use the -// 'avsc' library to decode these blocks. Install avsc with the following -// command: npm install avsc -const avro = require('avsc'); - -// See reference documentation at -// https://cloud.google.com/bigquery/docs/reference/storage -const {BigQueryReadClient} = require('@google-cloud/bigquery-storage'); - -const client = new BigQueryReadClient(); - -async function bigqueryStorageQuickstart() { - // Get current project ID. The read session is created in this project. - // This project can be different from that which contains the table. - const myProjectId = await client.getProjectId(); - - // This example reads baby name data from the public datasets. - const projectId = 'bigquery-public-data'; - const datasetId = 'usa_names'; - const tableId = 'usa_1910_current'; - - const tableReference = `projects/${projectId}/datasets/${datasetId}/tables/${tableId}`; - - const parent = `projects/${myProjectId}`; - - /* We limit the output columns to a subset of those allowed in the table, - * and set a simple filter to only report names from the state of - * Washington (WA). - */ - const readOptions = { - selectedFields: ['name', 'number', 'state'], - rowRestriction: 'state = "WA"', - }; - - let tableModifiers = null; - const snapshotSeconds = 0; - - // Set a snapshot time if it's been specified. - if (snapshotSeconds > 0) { - tableModifiers = {snapshotTime: {seconds: snapshotSeconds}}; - } - - // API request. 
- const request = { - parent, - readSession: { - table: tableReference, - // This API can also deliver data serialized in Apache Arrow format. - // This example leverages Apache Avro. - dataFormat: 'AVRO', - readOptions, - tableModifiers, - }, - }; - - const [session] = await client.createReadSession(request); - - const schema = JSON.parse(session.avroSchema.schema); - - const avroType = avro.Type.forSchema(schema); - - /* The offset requested must be less than the last - * row read from ReadRows. Requesting a larger offset is - * undefined. - */ - let offset = 0; - - const readRowsRequest = { - // Required stream name and optional offset. Offset requested must be less than - // the last row read from readRows(). Requesting a larger offset is undefined. - readStream: session.streams[0].name, - offset, - }; - - const names = new Set(); - const states = []; - - /* We'll use only a single stream for reading data from the table. Because - * of dynamic sharding, this will yield all the rows in the table. However, - * if you wanted to fan out multiple readers you could do so by having a - * reader process each individual stream. - */ - client - .readRows(readRowsRequest) - .on('error', console.error) - .on('data', data => { - offset = data.avroRows.serializedBinaryRows.offset; - - try { - // Decode all rows in buffer - let pos; - do { - const decodedData = avroType.decode( - data.avroRows.serializedBinaryRows, - pos, - ); - - if (decodedData.value) { - names.add(decodedData.value.name); - - if (!states.includes(decodedData.value.state)) { - states.push(decodedData.value.state); - } - } - - pos = decodedData.offset; - } while (pos > 0); - } catch (error) { - console.log(error); - } - }) - .on('end', () => { - console.log(`Got ${names.size} unique names in states: ${states}`); - console.log(`Last offset: ${offset}`); - }); -} - -``` - ## Samples -Samples are in the [`samples/`](https://github.com/googleapis/nodejs-bigquery-storage/tree/main/samples) directory. 
Each sample's `README.md` has instructions for running its sample. +Samples are in the [`samples/`](https://github.com/googleapis/google-cloud-node/tree/main/handwritten/bigquery-storage/samples) directory. Each sample's `README.md` has instructions for running its sample. | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | -| Append_rows_buffered | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_buffered.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_buffered.js,samples/README.md) | -| Append_rows_json_writer_committed | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_committed.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_committed.js,samples/README.md) | -| Append_rows_json_writer_default | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_json_writer_default.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_json_writer_default.js,samples/README.md) | -| Append_rows_pending | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_pending.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_pending.js,samples/README.md) | -| Append_rows_proto2 | [source 
code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_proto2.js,samples/README.md) | -| Append_rows_table_to_proto2 | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/append_rows_table_to_proto2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/append_rows_table_to_proto2.js,samples/README.md) | -| Customer_record_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/customer_record_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/customer_record_pb.js,samples/README.md) | -| BigQuery Storage Quickstart | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | -| Sample_data_pb | [source code](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/samples/sample_data_pb.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery-storage&page=editor&open_in_editor=samples/sample_data_pb.js,samples/README.md) | +| Big_query_read.create_read_session | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js) | [![Open in 
Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_read.read_rows | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_read.split_read_stream | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.append_rows | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.batch_commit_write_streams | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js) | [![Open in 
Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.create_write_stream | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.finalize_write_stream | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.flush_rows | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_write.get_write_stream | [source 
code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_create_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_delete_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_update_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.list_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.stream_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_create_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_delete_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.batch_update_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.list_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Metastore_partition_service.stream_metastore_partitions | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_storage.batch_create_read_session_streams | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_storage.create_read_session | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js,handwritten/bigquery-storage/samples/README.md) | +| 
Big_query_storage.finalize_stream | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_storage.read_rows | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js,handwritten/bigquery-storage/samples/README.md) | +| Big_query_storage.split_read_stream | [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js,handwritten/bigquery-storage/samples/README.md) | @@ -348,7 +135,7 @@ More Information: [Google Cloud Platform Launch Stages][launch_stages] ## Contributing -Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/CONTRIBUTING.md). +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-cloud-node/blob/main/CONTRIBUTING.md). 
Please note that this `README.md`, the `samples/README.md`, and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) @@ -360,7 +147,7 @@ to its templates in Apache Version 2.0 -See [LICENSE](https://github.com/googleapis/nodejs-bigquery-storage/blob/main/LICENSE) +See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/LICENSE) [client-docs]: https://cloud.google.com/nodejs/docs/reference/bigquery-storage/latest [product-docs]: https://cloud.google.com/bigquery/docs/reference/storage diff --git a/handwritten/bigquery-storage/samples/README.md b/handwritten/bigquery-storage/samples/README.md new file mode 100644 index 00000000000..6568c068d30 --- /dev/null +++ b/handwritten/bigquery-storage/samples/README.md @@ -0,0 +1,464 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google BigQuery Storage: Node.js Samples](https://github.com/googleapis/google-cloud-node) + +[![Open in Cloud Shell][shell_img]][shell_link] + + + +## Table of Contents + +* [Before you begin](#before-you-begin) +* [Samples](#samples) + * [Big_query_read.create_read_session](#big_query_read.create_read_session) + * [Big_query_read.read_rows](#big_query_read.read_rows) + * [Big_query_read.split_read_stream](#big_query_read.split_read_stream) + * [Big_query_write.append_rows](#big_query_write.append_rows) + * [Big_query_write.batch_commit_write_streams](#big_query_write.batch_commit_write_streams) + * [Big_query_write.create_write_stream](#big_query_write.create_write_stream) + * [Big_query_write.finalize_write_stream](#big_query_write.finalize_write_stream) + * [Big_query_write.flush_rows](#big_query_write.flush_rows) + * [Big_query_write.get_write_stream](#big_query_write.get_write_stream) + * 
[Metastore_partition_service.batch_create_metastore_partitions](#metastore_partition_service.batch_create_metastore_partitions) + * [Metastore_partition_service.batch_delete_metastore_partitions](#metastore_partition_service.batch_delete_metastore_partitions) + * [Metastore_partition_service.batch_update_metastore_partitions](#metastore_partition_service.batch_update_metastore_partitions) + * [Metastore_partition_service.list_metastore_partitions](#metastore_partition_service.list_metastore_partitions) + * [Metastore_partition_service.stream_metastore_partitions](#metastore_partition_service.stream_metastore_partitions) + * [Metastore_partition_service.batch_create_metastore_partitions](#metastore_partition_service.batch_create_metastore_partitions) + * [Metastore_partition_service.batch_delete_metastore_partitions](#metastore_partition_service.batch_delete_metastore_partitions) + * [Metastore_partition_service.batch_update_metastore_partitions](#metastore_partition_service.batch_update_metastore_partitions) + * [Metastore_partition_service.list_metastore_partitions](#metastore_partition_service.list_metastore_partitions) + * [Metastore_partition_service.stream_metastore_partitions](#metastore_partition_service.stream_metastore_partitions) + * [Big_query_storage.batch_create_read_session_streams](#big_query_storage.batch_create_read_session_streams) + * [Big_query_storage.create_read_session](#big_query_storage.create_read_session) + * [Big_query_storage.finalize_stream](#big_query_storage.finalize_stream) + * [Big_query_storage.read_rows](#big_query_storage.read_rows) + * [Big_query_storage.split_read_stream](#big_query_storage.split_read_stream) + +## Before you begin + +Before running the samples, make sure you've followed the steps outlined in +[Using the client library](https://github.com/googleapis/google-cloud-node#using-the-client-library). 
+ +`cd samples` + +`npm install` + +`cd ..` + +## Samples + + + +### Big_query_read.create_read_session + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_read.create_read_session.js` + + +----- + + + + +### Big_query_read.read_rows + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_read.read_rows.js` + + +----- + + + + +### Big_query_read.split_read_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_read.split_read_stream.js` + + +----- + + + + +### Big_query_write.append_rows + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.append_rows.js` + + +----- + + + + +### Big_query_write.batch_commit_write_streams + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.batch_commit_write_streams.js` + + +----- + + + + +### Big_query_write.create_write_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.create_write_stream.js` + + +----- + + + + +### Big_query_write.finalize_write_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.finalize_write_stream.js` + + +----- + + + + +### Big_query_write.flush_rows + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.flush_rows.js` + + +----- + + + + +### Big_query_write.get_write_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1/big_query_write.get_write_stream.js` + + +----- + + + + +### Metastore_partition_service.batch_create_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_create_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.batch_delete_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_delete_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.batch_update_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.batch_update_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.list_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.list_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.stream_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1alpha/metastore_partition_service.stream_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.batch_create_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_create_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.batch_delete_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_delete_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.batch_update_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.batch_update_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.list_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.list_metastore_partitions.js` + + +----- + + + + +### Metastore_partition_service.stream_metastore_partitions + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta/metastore_partition_service.stream_metastore_partitions.js` + + +----- + + + + +### Big_query_storage.batch_create_read_session_streams + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.batch_create_read_session_streams.js` + + +----- + + + + +### Big_query_storage.create_read_session + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.create_read_session.js` + + +----- + + + + +### Big_query_storage.finalize_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.finalize_stream.js` + + +----- + + + + +### Big_query_storage.read_rows + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.read_rows.js` + + +----- + + + + +### Big_query_storage.split_read_stream + +View the [source code](https://github.com/googleapis/google-cloud-node/blob/main/handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js). 
+ +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js,samples/README.md) + +__Usage:__ + + +`node handwritten/bigquery-storage/samples/generated/v1beta1/big_query_storage.split_read_stream.js` + + + + + + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=samples/README.md +[product-docs]: https://cloud.google.com/bigquery/docs/reference/storage From 6f2be1148aff74a4cc9b72dacf74db30fe5c1f5f Mon Sep 17 00:00:00 2001 From: Gautam Sharda Date: Thu, 26 Feb 2026 18:31:31 +0000 Subject: [PATCH 331/333] chore: fix invalid pnpm peer dependency range for protobufjs --- handwritten/bigquery-storage/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 3019f375f53..525610dc3cb 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -40,7 +40,7 @@ "google-gax": "^5.0.0" }, "peerDependencies": { - "protobufjs": "^7.2.4 - 7.5.0" + "protobufjs": "7.2.4 - 7.5.0" }, "devDependencies": { "@google-cloud/bigquery": "^8.0.0", From 0a1252493969d233e61882c7b91121a688b61a79 Mon Sep 17 00:00:00 2001 From: Gautam Sharda Date: Fri, 27 Feb 2026 00:42:03 +0000 Subject: [PATCH 332/333] chore: add long as dep --- handwritten/bigquery-storage/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/handwritten/bigquery-storage/package.json b/handwritten/bigquery-storage/package.json index 525610dc3cb..f0b7ef38830 100644 --- a/handwritten/bigquery-storage/package.json +++ b/handwritten/bigquery-storage/package.json @@ -37,7 +37,8 @@ "core-js": 
"^3.41.0", "extend": "^3.0.2", "google-auth-library": "^10.0.0", - "google-gax": "^5.0.0" + "google-gax": "^5.0.0", + "long": "^5.2.0" }, "peerDependencies": { "protobufjs": "7.2.4 - 7.5.0" From d6d63730f15eed7cdb364ef7fe5abd583a90cff7 Mon Sep 17 00:00:00 2001 From: Gautam Sharda Date: Sat, 28 Feb 2026 00:45:31 +0000 Subject: [PATCH 333/333] chore: migrate test fixtures and update paths --- .../system-test/fixtures/customer_record.json | 17 +++ .../system-test/managed_writer_client_test.ts | 2 +- .../system-test/reader_client_test.ts | 2 +- .../bigquery-storage/test/adapt/proto.ts | 2 +- .../test/testdata/message.json | 143 ++++++++++++++++++ handwritten/bigquery-storage/tsconfig.json | 2 + 6 files changed, 165 insertions(+), 3 deletions(-) create mode 100644 handwritten/bigquery-storage/system-test/fixtures/customer_record.json create mode 100644 handwritten/bigquery-storage/test/testdata/message.json diff --git a/handwritten/bigquery-storage/system-test/fixtures/customer_record.json b/handwritten/bigquery-storage/system-test/fixtures/customer_record.json new file mode 100644 index 00000000000..e60aafdb8ef --- /dev/null +++ b/handwritten/bigquery-storage/system-test/fixtures/customer_record.json @@ -0,0 +1,17 @@ +{ + "nested": { + "CustomerRecord": { + "fields": { + "customer_name": { + "type": "string", + "id": 1 + }, + "row_num": { + "rule": "required", + "type": "int64", + "id": 2 + } + } + } + } +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts index f1442e786c1..a2c80746db1 100644 --- a/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts +++ b/handwritten/bigquery-storage/system-test/managed_writer_client_test.ts @@ -24,7 +24,7 @@ import * as protos from '../protos/protos'; import * as bigquerywriter from '../src'; import * as protobuf from 'protobufjs'; import {ClientOptions} from 'google-gax'; -import * 
as customerRecordProtoJson from '../samples/customer_record.json'; +import * as customerRecordProtoJson from './fixtures/customer_record.json'; import {JSONEncoder} from '../src/managedwriter/encoder'; import {PendingWrite} from '../src/managedwriter/pending_write'; import {PreciseDate} from '@google-cloud/precise-date'; diff --git a/handwritten/bigquery-storage/system-test/reader_client_test.ts b/handwritten/bigquery-storage/system-test/reader_client_test.ts index 0363f34789a..3f08c16fa7a 100644 --- a/handwritten/bigquery-storage/system-test/reader_client_test.ts +++ b/handwritten/bigquery-storage/system-test/reader_client_test.ts @@ -21,7 +21,7 @@ import {BigQuery, TableRow, TableSchema} from '@google-cloud/bigquery'; import * as protos from '../protos/protos'; import * as protobuf from 'protobufjs'; import {ClientOptions} from 'google-gax'; -import * as customerRecordProtoJson from '../samples/customer_record.json'; +import * as customerRecordProtoJson from './fixtures/customer_record.json'; import * as bigquerystorage from '../src'; import * as reader from '../src/reader'; import {cleanupDatasets} from './util'; diff --git a/handwritten/bigquery-storage/test/adapt/proto.ts b/handwritten/bigquery-storage/test/adapt/proto.ts index c1bf2fe8eaa..3a871624c56 100644 --- a/handwritten/bigquery-storage/test/adapt/proto.ts +++ b/handwritten/bigquery-storage/test/adapt/proto.ts @@ -16,7 +16,7 @@ import * as assert from 'assert'; import {describe, it} from 'mocha'; import * as protobuf from 'protobufjs'; import * as adapt from '../../src/adapt'; -import * as messagesJSON from '../../samples/testdata/messages.json'; +import * as messagesJSON from '../testdata/message.json'; import * as protos from '../../protos/protos'; type TableFieldSchema = diff --git a/handwritten/bigquery-storage/test/testdata/message.json b/handwritten/bigquery-storage/test/testdata/message.json new file mode 100644 index 00000000000..a41c2b2ffd6 --- /dev/null +++ 
b/handwritten/bigquery-storage/test/testdata/message.json @@ -0,0 +1,143 @@ +{ + "nested": { + "testdata": { + "nested": { + "SimpleMessage": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "value": { + "type": "int64", + "id": 2 + }, + "other": { + "type": "string", + "id": 3 + } + } + }, + "GithubArchiveEntity": { + "fields": { + "id": { + "type": "int64", + "id": 1 + }, + "login": { + "type": "string", + "id": 2 + }, + "gravatar_id": { + "type": "string", + "id": 3 + }, + "avatar_url": { + "type": "string", + "id": 4 + }, + "url": { + "type": "string", + "id": 5 + } + } + }, + "GithubArchiveRepo": { + "fields": { + "id": { + "type": "int64", + "id": 1 + }, + "name": { + "type": "string", + "id": 2 + }, + "url": { + "type": "string", + "id": 3 + } + } + }, + "GithubArchiveMessage": { + "fields": { + "type": { + "type": "string", + "id": 1 + }, + "public": { + "type": "bool", + "id": 2 + }, + "payload": { + "type": "string", + "id": 3 + }, + "repo": { + "type": "GithubArchiveRepo", + "id": 4 + }, + "actor": { + "type": "GithubArchiveEntity", + "id": 5 + }, + "org": { + "type": "GithubArchiveEntity", + "id": 6 + }, + "created_at": { + "type": "int64", + "id": 7 + }, + "id": { + "type": "string", + "id": 8 + }, + "other": { + "type": "string", + "id": 9 + } + } + }, + "ExtEnum": { + "values": { + "UNDEFINED": 0, + "THING": 1, + "OTHER_THING": 2 + } + }, + "ExternalEnumMessage": { + "fields": { + "msg_a": { + "type": "EnumMsgA", + "id": 1 + }, + "msg_b": { + "type": "EnumMsgB", + "id": 2 + } + } + }, + "EnumMsgA": { + "fields": { + "foo": { + "type": "string", + "id": 1 + }, + "bar": { + "type": "ExtEnum", + "id": 2 + } + } + }, + "EnumMsgB": { + "fields": { + "baz": { + "type": "ExtEnum", + "id": 1 + } + } + } + } + } + } +} \ No newline at end of file diff --git a/handwritten/bigquery-storage/tsconfig.json b/handwritten/bigquery-storage/tsconfig.json index ca73e7bfc82..ee0702dcdef 100644 --- a/handwritten/bigquery-storage/tsconfig.json +++ 
b/handwritten/bigquery-storage/tsconfig.json @@ -14,7 +14,9 @@ "src/**/*.ts", "test/*.ts", "test/**/*.ts", + "test/testdata/message.json", "system-test/*.ts", + "system-test/fixtures/customer_record.json", "src/**/*.json", "samples/**/*.json", "protos/protos.json"